Decouple Fault Management from stx-config

Create fault management REST API service
Create fault management client and CLI shell
Add a python extension for fault management application APIs
Update fault management python APIs to use the python extension
Update fault manager to retrieve the SNMP configuration from the config file

Story: 2002828
Task: 22747

Depends-On: https://review.openstack.org/#/c/592176/
Change-Id: I888d8d23edf75d05d51594ccca55570ae366c848
Signed-off-by: Tao Liu <tao.liu@windriver.com>
This commit is contained in:
Tao Liu 2018-08-13 11:41:48 -04:00
parent 7bdf6fd47c
commit c8159ea6cb
118 changed files with 9365 additions and 342 deletions

View File

@ -2,5 +2,7 @@ fm-api
fm-common
fm-mgr
fm-doc
fm-rest-api
python-fmclient
snmp-ext
snmp-audittrail

View File

@ -1,5 +1,5 @@
#
# Copyright (c) 2013-2014 Wind River Systems, Inc.
# Copyright (c) 2013-2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
@ -11,9 +11,9 @@
#
import copy
import subprocess
from . import constants
import six
import fm_core
class ClientException(Exception):
@ -31,8 +31,8 @@ class ClientException(Exception):
# on the alarm. Optional.
# alarm_type: see ALARM_TYPE
# probable_cause: see ALARM_PROBABLE_CAUSE
# proposed_repair_action:free-format string providing additional details on how to
# clear the alarm. Optional.
# proposed_repair_action:free-format string providing additional details on
# how to clear the alarm. Optional.
# service_affecting: true/false, default to false
# suppression: true/false (allowed/not-allowed), default to false
# uuid: unique identifier of an active alarm instance, filled by FM system
@ -76,67 +76,59 @@ class FaultAPIs(object):
self._check_required_attributes(data)
self._validate_attributes(data)
buff = self._alarm_to_str(data)
cmd = constants.FM_CLIENT_SET_FAULT + '"' + buff + '"'
resp = self._run_cmd_and_get_resp(cmd)
if (resp[0] == "Ok") and (len(resp) > 1):
return resp[1]
else:
try:
return fm_core.set(buff)
except (RuntimeError, SystemError, TypeError):
return None
def clear_fault(self, alarm_id, entity_instance_id):
sep = constants.FM_CLIENT_STR_SEP
buff = (sep + self._check_val(alarm_id) + sep +
self._check_val(entity_instance_id) + sep)
cmd = constants.FM_CLIENT_CLEAR_FAULT + '"' + buff + '"'
resp = self._run_cmd_and_get_resp(cmd)
if resp[0] == "Ok":
return True
else:
try:
return fm_core.clear(buff)
except (RuntimeError, SystemError, TypeError):
return False
def get_fault(self, alarm_id, entity_instance_id):
sep = constants.FM_CLIENT_STR_SEP
buff = (sep + self._check_val(alarm_id) + sep +
self._check_val(entity_instance_id) + sep)
cmd = constants.FM_CLIENT_GET_FAULT + '"' + buff + '"'
resp = self._run_cmd_and_get_resp(cmd)
if (resp[0] == "Ok") and (len(resp) > 1):
return self._str_to_alarm(resp[1])
else:
try:
resp = fm_core.get(buff)
return self._str_to_alarm(resp) if resp else None
except (RuntimeError, SystemError, TypeError):
return None
def clear_all(self, entity_instance_id):
cmd = constants.FM_CLIENT_CLEAR_ALL + '"' + entity_instance_id + '"'
resp = self._run_cmd_and_get_resp(cmd)
if resp[0] == "Ok":
return True
else:
try:
return fm_core.clear_all(entity_instance_id)
except (RuntimeError, SystemError, TypeError):
return False
def get_faults(self, entity_instance_id):
cmd = constants.FM_CLIENT_GET_FAULTS + '"' + entity_instance_id + '"'
resp = self._run_cmd_and_get_resp(cmd)
data = []
if resp[0] == "Ok":
for i in range(1, len(resp)):
alarm = self._str_to_alarm(resp[i])
data.append(alarm)
return data
else:
return None
try:
resp = fm_core.get_by_eid(entity_instance_id)
if resp is not None:
data = []
for i in resp:
data.append(self._str_to_alarm(i))
return data
except (RuntimeError, SystemError, TypeError):
pass
return None
def get_faults_by_id(self, alarm_id):
cmd = constants.FM_CLIENT_GET_FAULTS_BY_ID + '"' + alarm_id + '"'
resp = self._run_cmd_and_get_resp(cmd)
data = []
if resp[0] == "Ok":
for i in range(1, len(resp)):
alarm = self._str_to_alarm(resp[i])
data.append(alarm)
return data
else:
return None
try:
resp = fm_core.get_by_aid(alarm_id)
if resp is not None:
data = []
for i in resp:
data.append(self._str_to_alarm(i))
return data
except (RuntimeError, SystemError, TypeError):
pass
return None
@staticmethod
def _check_val(data):
@ -177,21 +169,6 @@ class FaultAPIs(object):
line[constants.FM_TIMESTAMP_INDEX])
return data
@staticmethod
def _run_cmd_and_get_resp(cmd):
resp = []
cmd = cmd.encode('utf-8')
pro = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
output = pro.communicate()[0]
lines = output.split('\n')
for line in lines:
if line != '':
resp.append(line)
if len(resp) == 0:
resp.append("Unknown")
return resp
@staticmethod
def _check_required_attributes(data):
if data.alarm_id is None:

View File

@ -1,6 +1,7 @@
%define local_dir /usr/local
%define local_bindir %{local_dir}/bin
%define cgcs_doc_deploy_dir /opt/deploy/cgcs_doc
%define pythonroot /usr/lib64/python2.7/site-packages
Summary: CGTS Platform Fault Management Common Package
Name: fm-common
@ -15,6 +16,7 @@ BuildRequires: util-linux
BuildRequires: postgresql-devel
BuildRequires: libuuid-devel
BuildRequires: python-devel
BuildRequires: python-setuptools
%package -n fm-common-dev
Summary: CGTS Platform Fault Management Common Package - Development files
@ -47,6 +49,7 @@ VER=%{version}
MAJOR=`echo $VER | awk -F . '{print $1}'`
MINOR=`echo $VER | awk -F . '{print $2}'`
make MAJOR=$MAJOR MINOR=$MINOR %{?_smp_mflags}
%{__python} setup.py build
%install
rm -rf $RPM_BUILD_ROOT
@ -55,9 +58,18 @@ MAJOR=`echo $VER | awk -F . '{print $1}'`
MINOR=`echo $VER | awk -F . '{print $2}'`
make DEST_DIR=$RPM_BUILD_ROOT BIN_DIR=%{local_bindir} LIB_DIR=%{_libdir} INC_DIR=%{_includedir} MAJOR=$MAJOR MINOR=$MINOR install_non_bb
%{__python} setup.py install --root=%{buildroot} \
--install-lib=%{pythonroot} \
--prefix=/usr \
--install-data=/usr/share
install -d $RPM_BUILD_ROOT/usr/bin
install -m 755 fm_db_sync_event_suppression.py $RPM_BUILD_ROOT/usr/bin/fm_db_sync_event_suppression.py
# install the headers that used by fm-mgr package
install -m 644 -p -D fmConfig.h %{buildroot}%{_includedir}/fmConfig.h
install -m 644 -p -D fmLog.h %{buildroot}%{_includedir}/fmLog.h
CGCS_DOC_DEPLOY=$RPM_BUILD_ROOT/%{cgcs_doc_deploy_dir}
install -d $CGCS_DOC_DEPLOY
# install fmAlarm.h in CGCS_DOC_DEPLOY_DIR
@ -75,6 +87,9 @@ rm -rf $RPM_BUILD_ROOT
%{_libdir}/*.so.*
/usr/bin/fm_db_sync_event_suppression.py
%{pythonroot}/fm_core.so
%{pythonroot}/fm_core-*.egg-info
%files -n fm-common-dev
%defattr(-,root,root,-)
%{_includedir}/*

View File

@ -1,6 +1,6 @@
SRCS = fmAPI.cpp fmFile.cpp fmLog.cpp fmMsgServer.cpp fmMutex.cpp fmSocket.cpp fmThread.cpp fmTime.cpp \
fmAlarmUtils.cpp fmDb.cpp fmDbUtils.cpp fmDbAlarm.cpp fmSnmpUtils.cpp \
fmDbEventLog.cpp fmEventSuppression.cpp
fmDbEventLog.cpp fmEventSuppression.cpp fmConfig.cpp
CLI_SRCS = fm_cli.cpp
OBJS = $(SRCS:.cpp=.o)
CLI_OBJS = fm_cli.o
@ -9,7 +9,7 @@ INCLUDES = -I./
CCFLAGS = -g -O2 -Wall -Werror -fPIC
LIBFMCOMMON_SO := libfmcommon.so
build: lib fmClientCli
build: lib fmClientCli
.cpp.o:
$(CXX) $(CCFLAGS) $(INCLUDES) $(EXTRACCFLAGS) -c $< -o $@

View File

@ -472,7 +472,7 @@ bool fm_alarm_to_string(const SFmAlarmDataT *alarm, std::string &str) {
return str.size()>0;
}
bool fm_alarm_from_string(const std::string &alstr,SFmAlarmDataT *a) {
bool fm_alarm_from_string(const std::string &alstr, SFmAlarmDataT *a) {
strvect_t s;
str_to_vector(alstr, s);

View File

@ -0,0 +1,92 @@
//
// Copyright (c) 2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
#include <stdio.h>
#include <stdlib.h>
#include <string>
#include <map>
#include "fmAPI.h"
#include "fmLog.h"
#include "fmFile.h"
#include "fmConfig.h"
#include "fmMutex.h"
#include "fmConstants.h"
#include "fmSnmpConstants.h"
#include "fmSnmpUtils.h"
typedef std::map<std::string,std::string> configParams;
static const char *conf = NULL;
static int config_loaded = false;
// Process-wide mutex guarding the lazy config load and the config map.
// Deliberately leaked (never deleted) so it stays valid even during
// static-destruction teardown at process exit.
CFmMutex & getConfMutex(){
    static CFmMutex *instance = new CFmMutex;
    return *instance;
}
// Accessor for the process-wide key/value configuration map.
// A function-local static sidesteps global init-order issues.
// The local was renamed from "conf" so it no longer shadows the
// file-scope config file path `const char *conf`.
configParams &getConfigMap(){
    static configParams config_map;
    return config_map;
}
// Record the path of the fault-management config file.  The file is not
// read here; it is parsed lazily on the first fm_get_config_key() call.
// NOTE(review): the pointer is stored as-is, so the caller's string must
// outlive all config lookups — confirm callers pass a persistent buffer.
void fm_conf_set_file(const char *fn){
conf = fn;
}
// Parse the configuration file (registered via fm_conf_set_file) into the
// process-wide key/value map.  Lines have the form "key=value"; blank
// lines and '#' comments are ignored.  Exits the process if no file was
// set or the file cannot be opened (original fail-fast behavior kept).
// The misspelled name "paramters" is the public name declared in
// fmConfig.h and is kept for interface compatibility.
void fm_get_config_paramters(){
    CfmFile f;
    std::string delimiter = "=";
    std::string line, key, value;
    size_t pos = 0;

    if (conf == NULL){
        FM_ERROR_LOG("The config file is not set\n");
        exit(-1);
    }

    if (!f.open(conf, CfmFile::READ, false)){
        FM_ERROR_LOG("Failed to open config file: %s\n", conf);
        exit(-1);
    }

    while (true){
        if (!f.read_line(line)) break;
        if (line.size() == 0) continue;
        if (line[0] == '#') continue;

        pos = line.find(delimiter);
        // Fix: skip malformed lines with no '='.  Previously pos was
        // npos, so the whole line was stored as both key and value
        // (substr(0, npos) plus erase(0, npos + 1) wrapping to 0).
        if (pos == std::string::npos) continue;

        key = line.substr(0, pos);
        value = line.erase(0, pos + delimiter.length());
        getConfigMap()[key] = value;

        // The SNMP trap destination list is acted on immediately, not
        // just cached in the map.
        if (key.compare(FM_SNMP_TRAPDEST) == 0){
            set_trap_dest_list(value);
        }

        if (key.compare(FM_SQL_CONNECTION) != 0){
            // Don't log sql_connection, as it has a password
            FM_INFO_LOG("Config key (%s), value (%s)",
                        key.c_str(), value.c_str());
        }
    }
}
// Look up a configuration value by key.
// On first use the config file is parsed and cached in the process-wide
// map; subsequent calls only read memory.  The load and the lookup are
// both serialized by the config mutex, so concurrent first calls parse
// the file exactly once.
// Returns true and fills `val` when the key exists, false otherwise.
bool fm_get_config_key(std::string &key, std::string &val){
configParams::iterator it;
CFmMutexGuard m(getConfMutex());
// Lazy one-time load, guarded by the mutex held above.
if (!config_loaded){
fm_get_config_paramters();
config_loaded = true;
}
it = getConfigMap().find(key);
if (it != getConfigMap().end()){
val = it->second;
return true;
}
return false;
}

View File

@ -0,0 +1,18 @@
//
// Copyright (c) 2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
#ifndef FMCONFIG_H_
#define FMCONFIG_H_

#include <string>

// Register the path of the fault-management config file; the file is
// parsed lazily on the first fm_get_config_key() call.
void fm_conf_set_file(const char *fn);

// Parse the registered config file into the process-wide key/value map.
// (Spelling "paramters" is the established public name.)
void fm_get_config_paramters();

// Fetch the value for `key`; returns true and fills `val` on success.
bool fm_get_config_key(std::string &key, std::string &val);

#endif /* FMCONFIG_H_ */

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2014 Wind River Systems, Inc.
// Copyright (c) 2014-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -16,10 +16,10 @@
#define FM_DB_TABLE_COUNT_COLUMN "count"
/* Alarm table name */
#define FM_ALARM_TABLE_NAME "i_alarm"
#define FM_ALARM_TABLE_NAME "alarm"
/* Event log table name */
#define FM_EVENT_LOG_TABLE_NAME "i_event_log"
#define FM_EVENT_LOG_TABLE_NAME "event_log"
/* Event suppression table name */
#define FM_EVENT_SUPPRESSION_TABLE_NAME "event_suppression"
@ -81,11 +81,6 @@
#define FM_EVENT_SUPPRESSION_UNSUPPRESSED "unsuppressed"
#define FM_EVENT_SUPPRESSION_NONE "None"
/* System table name */
#define FM_SYSTEM_TABLE_NAME "i_system"
#define FM_SYSTEM_NAME_COLUMN "name"
#define FM_SYSTEM_REGION_COLUMN "region_name"
#define FM_ENTITY_ROOT_KEY "system="
#define FM_ENTITY_REGION_KEY "region="
@ -93,6 +88,10 @@
/* config keys */
#define FM_SQL_CONNECTION "sql_connection"
#define FM_EVENT_LOG_MAX_SIZE "event_log_max_size"
#define FM_SYSTEM_NAME "system_name"
#define FM_REGION_NAME "region_name"
#define FM_DEBUG_FLAG "debug"
#define FM_STRING_TRUE "True"
#define CLEAR_ALL_REASON_TEXT "System initiated hierarchical alarm clear"

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2016 Wind River Systems, Inc.
// Copyright (c) 2016-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -16,7 +16,7 @@
#include "fmAlarmUtils.h"
#include "fmDbUtils.h"
#include "fmDb.h"
#include "fmDbConstants.h"
#include "fmConstants.h"
#include "fmThread.h"
@ -62,11 +62,7 @@ bool CFmDBSession::connect(const char *uri){
m_conn.uri = uri;
val = get_parameter_status("standard_conforming_strings");
//FM_INFO_LOG("connect: server standard_conforming_strings parameter: %s",
// val ? val : "unavailable");
m_conn.equote = (val && (0 == strcmp("off", val)));
//FM_INFO_LOG("connect: server requires E'' quotes: %s", m_conn.equote ? "YES" : "NO");
m_conn.server_version = PQserverVersion(m_conn.pgconn);
m_conn.protocol = PQprotocolVersion(m_conn.pgconn);
m_conn.encoding = get_parameter_status("client_encoding");
@ -132,7 +128,7 @@ bool CFmDBSession::query(const char *db_cmd,fm_db_result_t & result) {
return true;
}
bool CFmDBSession::cmd(const char *db_cmd){
bool CFmDBSession::cmd(const char *db_cmd, bool check_row){
PGresult *res;
bool rc = true;
@ -147,7 +143,7 @@ bool CFmDBSession::cmd(const char *db_cmd){
FM_ERROR_LOG("Failed to execute (%s) (%s)", db_cmd, PQresultErrorMessage(res));
rc = false;
}
if (rc){
if (rc && check_row){
int row = atoi(PQcmdTuples(res));
FM_DEBUG_LOG("SQL command returned successful: %d rows affected.\n", row);
if (row < 1) rc = false;

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2014 Wind River Systems, Inc.
// Copyright (c) 2014-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -53,7 +53,7 @@ public:
bool reconnect();
bool query(const char *db_cmd,fm_db_result_t & result);
bool cmd(const char *db_cmd);
bool cmd(const char *db_cmd, bool check_row=true);
bool params_cmd(fm_db_util_sql_params & sql_params);
PGconn* get_pgconn(){

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2014 Wind River Systems, Inc.
// Copyright (c) 2014-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -11,7 +11,7 @@
#include "fmLog.h"
#include "fmDbAlarm.h"
#include "fmAlarmUtils.h"
#include "fmDbConstants.h"
#include "fmConstants.h"
#include "fmDbUtils.h"
typedef std::map<int,std::string> itos_t;
@ -319,7 +319,7 @@ bool CFmDbAlarmOperation::get_all_alarms(CFmDBSession &sess, SFmAlarmDataT **ala
if (!get_alarms(sess, NULL, res))
return false;
std::string sname = fm_db_util_get_system_name(sess);
std::string sname = fm_db_util_get_system_name();
unsigned int found_num_alarms = res.size();
@ -436,7 +436,7 @@ bool CFmDbAlarmOperation::get_all_history_alarms(CFmDBSession &sess, SFmAlarmDat
*alarms = NULL;
if (!get_history(sess,res)) return false;
std::string sname = fm_db_util_get_system_name(sess);
std::string sname = fm_db_util_get_system_name();
unsigned int found_num_alarms = res.size();

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2014 Wind River Systems, Inc.
// Copyright (c) 2014-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -13,7 +13,7 @@
#include <map>
#include "fmAPI.h"
#include "fmDbConstants.h"
#include "fmConstants.h"
#include "fmDb.h"
class CFmDbAlarm {

View File

@ -12,7 +12,7 @@
#include "fmDbAlarm.h"
#include "fmDbEventLog.h"
#include "fmAlarmUtils.h"
#include "fmDbConstants.h"
#include "fmConstants.h"
#include "fmDbUtils.h"
typedef std::map<int,std::string> itos_t;
@ -291,7 +291,7 @@ bool CFmDbEventLogOperation::get_all_event_logs(CFmDBSession &sess, SFmAlarmData
if (!get_event_logs(sess, res)) return false;
std::string sname = fm_db_util_get_system_name(sess);
std::string sname = fm_db_util_get_system_name();
unsigned int found_num_logs = res.size();

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2016 Wind River Systems, Inc.
// Copyright (c) 2016-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -13,7 +13,7 @@
#include <map>
#include "fmAPI.h"
#include "fmDbConstants.h"
#include "fmConstants.h"
#include "fmDb.h"
typedef std::map<int,std::string> itos_t;

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2014 Wind River Systems, Inc.
// Copyright (c) 2014-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -25,29 +25,18 @@
#include "fmDb.h"
#include "fmDbUtils.h"
#include "fmDbAPI.h"
#include "fmDbConstants.h"
#include "fmConstants.h"
#include "fmAlarmUtils.h"
#include "fmConfig.h"
typedef std::map<std::string,std::string> configParams;
static const char *conf = NULL;
static pthread_mutex_t mutex = PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP;
CFmMutex & getConfMutex(){
static CFmMutex *m = new CFmMutex;
return *m;
}
configParams &getConfigMap(){
static configParams conf;
return conf;
}
void FM_DB_UT_NAME_VAL(
std::string &result,
const std::string &lhs, const std::string &rhs) {
std::string &result,
const std::string &lhs, const std::string &rhs) {
result+= lhs;
result+= " = '";
result+=rhs;
@ -55,8 +44,8 @@ void FM_DB_UT_NAME_VAL(
}
void FM_DB_UT_NAME_PARAM(
std::string &result,
const std::string &lhs, const std::string &rhs) {
std::string &result,
const std::string &lhs, const std::string &rhs) {
result+= lhs;
result+= "=";
result+=rhs;
@ -90,40 +79,6 @@ static int get_oldest_id(CFmDBSession &sess, const char* db_table){
return id;
}
static void get_config_parameters(){
CfmFile f;
std::string delimiter = "=";
std::string line, key, value;
size_t pos = 0;
if (conf == NULL){
FM_ERROR_LOG("The config file is not set\n");
exit(-1);
}
if (!f.open(conf, CfmFile::READ, false)){
FM_ERROR_LOG("Failed to open config file: %s\n", conf);
exit(-1);
}
while (true){
if (!f.read_line(line)) break;
if (line.size() == 0) continue;
if (line[0] == '#') continue;
pos = line.find(delimiter);
key = line.substr(0, pos);
value = line.erase(0, pos + delimiter.length());
getConfigMap()[key] = value;
if (key.compare("sql_connection") != 0){
// Don't log sql_connection, as it has a password
FM_INFO_LOG("Config key (%s), value (%s)",
key.c_str(), value.c_str());
}
}
}
static inline CFmDBSession & FmDbSessionFromHandle(TFmAlarmSessionT *p){
return *((CFmDBSession*)p);
@ -189,7 +144,7 @@ int fm_db_util_string_to_int(std::string val){
}
void fm_db_util_make_timestamp_string(std::string &tstr, FMTimeT tm,
bool snmp){
bool snmp){
struct timespec ts;
if (tm != 0){
ts.tv_sec = tm / 1000000;
@ -517,28 +472,6 @@ bool fm_db_util_build_sql_delete_all(const char* db_table, const char *id,
return true;
}
void fm_db_util_set_conf_file(const char *fn){
conf = fn;
}
bool fm_db_util_get_config(std::string &key, std::string &val){
configParams::iterator it;
static int loaded = false;
CFmMutexGuard m(getConfMutex());
if (!loaded){
get_config_parameters();
loaded = true;
}
it = getConfigMap().find(key);
if (it != getConfigMap().end()){
val = it->second;
return true;
}
return false;
}
int & fm_get_alarm_history_max_size(){
static int max_size = 0;
@ -546,7 +479,7 @@ int & fm_get_alarm_history_max_size(){
if (max_size == 0){
std::string val;
std::string key = FM_EVENT_LOG_MAX_SIZE;
if (fm_db_util_get_config(key, val)){
if (fm_get_config_key(key, val)){
max_size = fm_db_util_string_to_int(val);
}else{
FM_ERROR_LOG("Fail to get config value for (%s)\n", key.c_str());
@ -561,7 +494,7 @@ int & fm_get_log_max_size(){
if (max_size == 0){
std::string val;
std::string key = FM_EVENT_LOG_MAX_SIZE;
if (fm_db_util_get_config(key, val)){
if (fm_get_config_key(key, val)){
max_size = fm_db_util_string_to_int(val);
}else{
FM_ERROR_LOG("Fail to get config value for (%s)\n", key.c_str());
@ -570,34 +503,21 @@ int & fm_get_log_max_size(){
return max_size;
}
std::string fm_db_util_get_system_name(CFmDBSession &sess){
fm_db_result_t res;
std::string cmd;
std::string fm_db_util_get_system_info(const std::string prefix, std::string key){
std::string val;
std::string name = "";
fm_db_util_build_sql_query(FM_SYSTEM_TABLE_NAME, NULL, cmd);
if (sess.query(cmd.c_str(), res)){
if (res.size() > 0){
std::map<std::string,std::string> entry = res[0];
name = FM_ENTITY_ROOT_KEY + entry[FM_SYSTEM_NAME_COLUMN];
}
if (fm_get_config_key(key, val)){
name = prefix + val;
}
return name;
}
std::string fm_db_util_get_region_name(CFmDBSession &sess){
fm_db_result_t res;
std::string cmd;
std::string name = "";
std::string fm_db_util_get_system_name(){
return fm_db_util_get_system_info(FM_ENTITY_ROOT_KEY, FM_SYSTEM_NAME);
}
fm_db_util_build_sql_query(FM_SYSTEM_TABLE_NAME, NULL, cmd);
if (sess.query(cmd.c_str(), res)){
if (res.size() > 0){
std::map<std::string,std::string> entry = res[0];
name = FM_ENTITY_REGION_KEY + entry[FM_SYSTEM_REGION_COLUMN];
}
}
return name;
std::string fm_db_util_get_region_name(){
return fm_db_util_get_system_info(FM_ENTITY_REGION_KEY, FM_REGION_NAME);
}
bool fm_db_util_get_row_counts(CFmDBSession &sess,
@ -656,13 +576,12 @@ bool fm_db_util_get_next_log_id(CFmDBSession &sess, int &id){
return true;
}
bool fm_db_util_create_session(CFmDBSession **sess){
bool fm_db_util_create_session(CFmDBSession **sess, std::string key){
TFmAlarmSessionT handle;
const char *db_conn = NULL;
std::string val;
std::string key = FM_SQL_CONNECTION;
if (fm_db_util_get_config(key, val) != true){
if (fm_get_config_key(key, val) != true){
FM_ERROR_LOG("Failed to get config for key: (%s)\n", key.c_str());
return false;
}
@ -682,34 +601,34 @@ bool fm_db_util_sync_event_suppression(void){
std::string val;
std::string key = FM_SQL_CONNECTION;
if (fm_db_util_get_config(key, val) != true){
FM_ERROR_LOG("Failed to get config for key: (%s)\n", key.c_str());
if (fm_get_config_key(key, val) != true){
FM_ERROR_LOG("NEW Failed to get config for key: (%s)\n", key.c_str());
return false;
}
db_conn = val.c_str();
FILE* file;
int argc;
char * argv[2];
FILE* file;
int argc;
char * argv[2];
FM_INFO_LOG("Starting event suppression synchronization...\n");
FM_INFO_LOG("Starting event suppression synchronization...\n");
argc = 2;
argv[0] = (char*)FM_DB_SYNC_EVENT_SUPPRESSION;
argv[1] = (char*)db_conn;
argc = 2;
argv[0] = (char*)FM_DB_SYNC_EVENT_SUPPRESSION;
argv[1] = (char*)db_conn;
Py_SetProgramName(argv[0]);
Py_Initialize();
PySys_SetArgv(argc, argv);
file = fopen(FM_DB_SYNC_EVENT_SUPPRESSION,"r");
PyRun_SimpleFile(file, FM_DB_SYNC_EVENT_SUPPRESSION);
fclose(file);
Py_Finalize();
Py_SetProgramName(argv[0]);
Py_Initialize();
PySys_SetArgv(argc, argv);
file = fopen(FM_DB_SYNC_EVENT_SUPPRESSION,"r");
PyRun_SimpleFile(file, FM_DB_SYNC_EVENT_SUPPRESSION);
fclose(file);
Py_Finalize();
FM_INFO_LOG("Completed event suppression synchronization.\n");
FM_INFO_LOG("Completed event suppression synchronization.\n");
return return_value;
return return_value;
}

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2014 Wind River Systems, Inc.
// Copyright (c) 2014-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -73,16 +73,15 @@ bool fm_db_util_build_sql_delete_all(const char* db_table,
bool fm_db_util_get_row_counts(CFmDBSession &sess, const char* db_table,
int &counts);
bool fm_db_util_create_session(CFmDBSession **sess);
bool fm_db_util_create_session(CFmDBSession **sess,
std::string key=FM_SQL_CONNECTION);
std::string fm_db_util_get_system_name(CFmDBSession &sess);
std::string fm_db_util_get_system_name();
std::string fm_db_util_get_region_name(CFmDBSession &sess);
std::string fm_db_util_get_region_name();
void fm_db_util_set_conf_file(const char *fn);
bool fm_db_util_get_config(std::string &key, std::string &val);
bool fm_db_util_get_next_log_id(CFmDBSession &sess, int &id);
std::string fm_db_util_int_to_string(int val);

View File

@ -1,13 +1,12 @@
//
// Copyright (c) 2016 Wind River Systems, Inc.
// Copyright (c) 2016-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
#include <stdlib.h>
#include <string>
#include "fmDbConstants.h"
#include "fmConstants.h"
#include "fmLog.h"
#include "fmDbAlarm.h"
#include "fmEventSuppression.h"
@ -73,6 +72,7 @@ bool CFmEventSuppressionOperation::set_table_notify_listen(CFmDBSession &sess){
sql = "SELECT rulename FROM pg_rules WHERE rulename='watch_event_supression'";
if ((sess.query(sql.c_str(), rule_name)) != true){
FM_DEBUG_LOG("Failed to query the existing rule");
return false;
}
@ -84,7 +84,8 @@ bool CFmEventSuppressionOperation::set_table_notify_listen(CFmDBSession &sess){
sql += FM_EVENT_SUPPRESSION_TABLE_NAME;
sql += ")";
if (sess.cmd(sql.c_str()) != true){
if (sess.cmd(sql.c_str(), false) != true){
FM_INFO_LOG("Failed to set rule CMD: (%s)", sql.c_str());
return false;
}
@ -95,10 +96,6 @@ bool CFmEventSuppressionOperation::set_table_notify_listen(CFmDBSession &sess){
sql += FM_EVENT_SUPPRESSION_TABLE_NAME;
FM_DEBUG_LOG("CMD:(%s)\n", sql.c_str());
sess.cmd(sql.c_str()); // TODO: sess.cmd() returns false since no row affected by LISTEN command
/* if (sess.cmd(sql.c_str()) != true){
return false;
} */
return true;
// no row affected by LISTEN command
return sess.cmd(sql.c_str(), false);
}

View File

@ -9,15 +9,6 @@
#define FMEVENTSUPPRESSION_H_
/*
#include <string>
#include <vector>
#include <stddef.h>
#include <map>
#include "fmAPI.h"
#include "fmDbConstants.h"
*/
#include "fmDb.h"
class CFmEventSuppressionOperation {

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2014 Wind River Systems, Inc.
// Copyright (c) 2014-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -7,12 +7,14 @@
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
#include <stdarg.h>
#include <syslog.h>
#include "fmLog.h"
#include "fmDbAlarm.h"
#include "fmDbEventLog.h"
#include <stdarg.h>
#include <syslog.h>
#include "fmConfig.h"
#include "fmConstants.h"
static pthread_mutex_t mutex = PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP;
@ -24,6 +26,13 @@ void fmLoggingInit() {
openlog(NULL,LOG_CONS | LOG_NDELAY,LOG_LOCAL1);
setlogmask(LOG_UPTO (LOG_INFO));
}
std::string val;
std::string key = FM_DEBUG_FLAG;
if ((fm_get_config_key(key, val)) && (val.compare("True") == 0)){
setlogmask(LOG_UPTO (LOG_DEBUG));
} else {
setlogmask(LOG_UPTO (LOG_INFO));
}
has_inited=true;
}
@ -36,11 +45,6 @@ void fmLogMsg(int level, const char *data,...){
va_end(ap);
}
bool fmLogFileInit(){
fmLoggingInit();
return true;
}
// formats event into json form for logging
static char * formattedEvent(CFmDbEventLog::data_type event_map, char * output, int outputSize) {
int bufLen = 1024;

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2014 Wind River Systems, Inc.
// Copyright (c) 2014-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -68,9 +68,6 @@ bool fmLogFileInit();
void fmLogAddEventLog(SFmAlarmDataT * data, bool is_event_suppressed);
//void fmLogAddEventLog(SFmAlarmDataT * data);
#endif

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2017 Wind River Systems, Inc.
// Copyright (c) 2017-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -40,8 +40,9 @@
#include "fmSnmpUtils.h"
#include "fmDbUtils.h"
#include "fmDbEventLog.h"
#include "fmDbConstants.h"
#include "fmConstants.h"
#include "fmEventSuppression.h"
#include "fmConfig.h"
#define FM_UUID_LENGTH 36
@ -125,7 +126,7 @@ static bool dequeue_get(sFmGetReq &req){
return true;
}
void create_db_log(CFmDBSession &sess, sFmJobReq &req){
void create_db_log(sFmJobReq &req){
SFmAlarmDataT alarm = req.data;
if (alarm.alarm_state != FM_ALARM_STATE_MSG){
@ -135,7 +136,7 @@ void create_db_log(CFmDBSession &sess, sFmJobReq &req){
}
fmLogAddEventLog(&alarm, false);
fm_snmp_util_gen_trap(sess, FM_ALARM_MESSAGE, alarm);
fm_snmp_util_gen_trap(FM_ALARM_MESSAGE, alarm);
}
void get_db_alarm(CFmDBSession &sess, sFmGetReq &req, void *context){
@ -293,7 +294,7 @@ void fm_handle_job_request(CFmDBSession &sess, sFmJobReq &req){
//check if it is a customer log request
if (req.type == FM_CUSTOMER_LOG) {
return create_db_log(sess,req);
return create_db_log(req);
}
// check to see if there are any alarms need to be masked/unmasked
@ -317,7 +318,7 @@ void fm_handle_job_request(CFmDBSession &sess, sFmJobReq &req){
req.data.alarm_id);
} else {
if (!is_event_suppressed)
fm_snmp_util_gen_trap(sess, req.type, req.data);
fm_snmp_util_gen_trap(req.type, req.data);
}
fmLogAddEventLog(&req.data, is_event_suppressed);
@ -572,14 +573,10 @@ EFmErrorT fm_server_create(const char *fn) {
hints.ai_addr = NULL;
hints.ai_next = NULL;
fm_conf_set_file(fn);
fmLoggingInit();
if (!fmLogFileInit()){
exit(-1);
}
fm_db_util_set_conf_file(fn);
if (!fm_db_util_sync_event_suppression()){
exit(-1);
}
@ -704,7 +701,7 @@ bool fm_handle_event_suppress_changes(CFmDBSession &sess){
}
SFmAlarmDataT *alarm = NULL;
fm_snmp_util_gen_trap(sess, FM_WARM_START, *alarm);
fm_snmp_util_gen_trap(FM_WARM_START, *alarm);
return true;
}

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2014 Wind River Systems, Inc.
// Copyright (c) 2017-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -14,12 +14,13 @@
#define FM_CUSTOMER_LOG 10
/* Trap Destination table name */
#define FM_TRAPDEST_TABLE_NAME "i_trap_destination"
/* Trap Destination definitions */
#define FM_TRAPDEST_IP_COLUMN "ip_address"
#define FM_SNMP_TRAPDEST "trap_destinations"
#define FM_TRAPDEST_COMM_COLUMN "community"
#define FM_TRAPDEST_IP "ip_address"
#define FM_TRAPDEST_COMM "community"
/* MIB Trap definitions */
const std::string WRS_ALARM_MIB = "WRS-ALARM-MIB";

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2014 Wind River Systems, Inc.
// Copyright (c) 2014-2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
@ -11,6 +11,7 @@
#include <map>
#include <assert.h>
#include <sstream>
#include <vector>
#include "fmDbAPI.h"
#include "fmFile.h"
@ -21,6 +22,7 @@
#include "fmDbUtils.h"
#include "fmSnmpConstants.h"
#include "fmSnmpUtils.h"
#include "fmConfig.h"
typedef std::map<int,std::string> int_to_objtype;
@ -28,6 +30,11 @@ static int_to_objtype objtype_map;
static pthread_mutex_t mutex = PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP;
// Accessor for the process-wide SNMP trap destination list (one entry per
// configured destination).  Function-local static avoids global
// init-order issues.
fm_db_result_t &getTrapDestList(){
static fm_db_result_t trap_dest_list;
return trap_dest_list;
}
static void add_to_table(int t, std::string objtype, int_to_objtype &tbl) {
tbl[t]=objtype;
}
@ -72,14 +79,38 @@ static std::string get_trap_objtype(int type){
init_objtype_table();
return objtype_map[type];
}
static bool get_trap_dest_list(CFmDBSession &sess,fm_db_result_t & res){
std::string cmd;
fm_db_util_build_sql_query(FM_TRAPDEST_TABLE_NAME, NULL, cmd);
return sess.query(cmd.c_str(), res);
// Rebuild the trap destination list from "ip community" strings.
// Each string is split on its first space: the part before it becomes the
// ip_address field, the remainder the SNMP community string.
// NOTE(review): an entry containing no space yields the whole string as
// both fields (find returns npos, erase's start wraps to 0) — assumes
// upstream always supplies "ip community" pairs; confirm at the source.
static void add_to_list(std::vector<std::string> &trap_strings) {
std::string delimiter = " ";
std::vector<std::string>::iterator it = trap_strings.begin();
std::vector<std::string>::iterator end = trap_strings.end();
// Replace, not append to, any previously configured destinations.
getTrapDestList().clear();
for (; it != end; ++it){
size_t pos = 0;
fm_db_single_result_t entry;
pos = (*it).find(delimiter);
entry[FM_TRAPDEST_IP] = (*it).substr(0, pos);
entry[FM_TRAPDEST_COMM] = (*it).erase(0, pos + delimiter.length());
getTrapDestList().push_back(entry);
}
}
static std::string format_trap_cmd(CFmDBSession &sess, int type, SFmAlarmDataT &data,
// Install the SNMP trap destination list from a comma-separated config
// value (the trap_destinations key), where each element is an
// "ip community" pair handed to add_to_list().
void set_trap_dest_list(std::string value){
    std::vector<std::string> entries;
    std::istringstream f(value);
    std::string s;
    while (getline(f, s, ',')) {
        // Fix: dropped the leftover debug `std::cout << s << std::endl;`
        // — a daemon must not write to stdout, and the entry is already
        // logged on the next line.
        FM_INFO_LOG("Add entry: (%s)", s.c_str());
        entries.push_back(s);
    }
    add_to_list(entries);
    // size() is unsigned; cast so the %d format specifier stays honest.
    FM_INFO_LOG("Set trap entries: (%d)", (int)getTrapDestList().size());
}
static std::string format_trap_cmd(int type, SFmAlarmDataT &data,
std::string &ip, std::string &comm){
std::string cmd;
std::string objtype;
@ -140,28 +171,29 @@ static std::string format_trap_cmd(CFmDBSession &sess, int type, SFmAlarmDataT &
return cmd;
}
bool fm_snmp_util_gen_trap(CFmDBSession &sess, int type, SFmAlarmDataT &data) {
bool fm_snmp_util_gen_trap(int type, SFmAlarmDataT &data) {
bool rc = true;
fm_buff_t cmdbuff;
fm_db_result_t res;
std::string cmd, eid;
if (!get_trap_dest_list(sess,res)) return false;
res = getTrapDestList();
if (&data != NULL) {
eid.assign(data.entity_instance_id);
std::string region_name = fm_db_util_get_region_name(sess);
std::string sys_name = fm_db_util_get_system_name(sess);
if (sys_name.length() != 0){
eid = sys_name + "."+ eid;
}
if (region_name.length() != 0){
eid = region_name + "."+ eid;
}
strncpy(data.entity_instance_id, eid.c_str(),
sizeof(data.entity_instance_id)-1);
}
if (&data != NULL) {
eid.assign(data.entity_instance_id);
std::string region_name = fm_db_util_get_region_name();
std::string sys_name = fm_db_util_get_system_name();
if (sys_name.length() != 0){
eid = sys_name + "."+ eid;
}
if (region_name.length() != 0){
eid = region_name + "."+ eid;
}
strncpy(data.entity_instance_id, eid.c_str(),
sizeof(data.entity_instance_id)-1);
}
fm_db_result_t::iterator it = res.begin();
fm_db_result_t::iterator end = res.end();
@ -169,9 +201,9 @@ bool fm_snmp_util_gen_trap(CFmDBSession &sess, int type, SFmAlarmDataT &data) {
for (; it != end; ++it){
memset(&(cmdbuff[0]), 0, cmdbuff.size());
cmd.clear();
std::string ip = (*it)[FM_TRAPDEST_IP_COLUMN];
std::string comm = (*it)[FM_TRAPDEST_COMM_COLUMN];
cmd = format_trap_cmd(sess,type, data, ip, comm);
std::string ip = (*it)[FM_TRAPDEST_IP];
std::string comm = (*it)[FM_TRAPDEST_COMM];
cmd = format_trap_cmd(type, data, ip, comm);
//FM_INFO_LOG("run cmd: %s\n", cmd.c_str());
char *pline = &(cmdbuff[0]);
@ -190,42 +222,17 @@ bool fm_snmp_util_gen_trap(CFmDBSession &sess, int type, SFmAlarmDataT &data) {
}
static bool fm_snmp_get_db_connection(std::string &connection){
CfmFile f;
const char *fn = "/etc/fm.conf";
std::string sql_key = FM_SQL_CONNECTION;
std::string delimiter = "=";
std::string line, key, value;
size_t pos = 0;
const char *fn = "/etc/fm/fm.conf";
std::string key = FM_SQL_CONNECTION;
if (!f.open(fn, CfmFile::READ, false)){
FM_ERROR_LOG("Failed to open config file: %s\n", fn);
exit (-1);
}
while (true){
if (!f.read_line(line)) break;
if (line.size() == 0) continue;
pos = line.find(delimiter);
key = line.substr(0, pos);
if (key == sql_key){
value = line.erase(0, pos + delimiter.length());
// Don't log sql_connection, as it has a password
//FM_DEBUG_LOG("Found it: (%s)\n", value.c_str());
connection = value;
return true;
}
}
return false;;
fm_conf_set_file(fn);
return fm_get_config_key(key, connection);
}
extern "C" {
bool fm_snmp_util_create_session(TFmAlarmSessionT *handle, const char* db_conn){
std::string key = FM_SQL_CONNECTION;
std::string conn;
CFmDBSession *sess = new CFmDBSession;
if (sess==NULL) return false;;

View File

@ -12,6 +12,8 @@
#include "fmAPI.h"
#include "fmDb.h"
bool fm_snmp_util_gen_trap(CFmDBSession &sess, int type, SFmAlarmDataT &data);
bool fm_snmp_util_gen_trap(int type, SFmAlarmDataT &data);
void set_trap_dest_list(std::string value);
#endif

View File

@ -1,5 +1,5 @@
#!/usr/bin/env python
# Copyright (c) 2016 Wind River Systems, Inc.
# Copyright (c) 2016-2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
@ -40,13 +40,13 @@ class EventSuppression(Base):
class ialarm(Base):
__tablename__ = 'i_alarm'
__tablename__ = 'alarm'
id = Column(Integer, primary_key=True, nullable=False)
alarm_id = Column('alarm_id', String(255), index=True)
class event_log(Base):
__tablename__ = 'i_event_log'
__tablename__ = 'event_log'
id = Column(Integer, primary_key=True, nullable=False)
event_log_id = Column('event_log_id', String(255), index=True)
state = Column(String(255))

View File

@ -0,0 +1,311 @@
//
// Copyright (c) 2018 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
#include <python2.7/Python.h>
#include <stdio.h>
#include "fmAPI.h"
#include "fmAlarmUtils.h"
// Default cap on the number of alarms fetched per query when the caller
// does not pass an explicit "max" keyword argument.
static const size_t DEF_MAX_ALARMS (1000);
// Upper bound for one formatted log message (vsnprintf buffer size).
static const size_t MAXSTRINGSIZE (500);

// Cached reference to the Python 'logging' module; imported lazily by
// log_msg() on first use.
static PyObject *logging = NULL;

// Severity levels; each value indexes the logging method-name table in
// log_msg().  max_level is a bound check sentinel, not a real level.
enum { error, warning, info, debug, max_level };

// Forward a printf-style message, prefixed with the extension name, to the
// Python 'logging' module at the given severity.
#define LOG_MSG(level,data,...) \
        log_msg(level, "fm_python_extension: "\
                data, ## __VA_ARGS__ )

#define ERROR_LOG(data,...) \
        LOG_MSG(error, data, ## __VA_ARGS__)

#define WARNING_LOG(data,...) \
        LOG_MSG(warning, data, ## __VA_ARGS__)

#define INFO_LOG(data,...) \
        LOG_MSG(info, data, ## __VA_ARGS__)

#define DEBUG_LOG(data,...) \
        LOG_MSG(debug, data, ## __VA_ARGS__)
static void log_msg(int type, const char *data,...)
{
static PyObject *str = NULL;
const char* methods[] = {"error", "warning", "info", "debug"};
if (logging == NULL) {
logging = PyImport_ImportModuleNoBlock("logging");
if (logging == NULL) {
PyErr_SetString(PyExc_ImportError,
"Could not import python module 'logging'");
}
}
va_list ap;
char buff[MAXSTRINGSIZE];
va_start(ap, data );
vsnprintf(buff, sizeof(buff), data, ap);
va_end(ap);
str = Py_BuildValue((char *)"s", buff);
if (type < max_level) {
PyObject_CallMethod(logging, (char *)methods[type], (char *)"O", str);
}
Py_DECREF(str);
}
// Python: fm_core.set(alarm_string) -> uuid string | None
// Create or update an alarm from its serialized string form.  Returns the
// UUID assigned to the alarm on success; None on parse failure, on an
// unreachable FM manager, or on any other API error.
static PyObject * _fm_set(PyObject * self, PyObject *args) {

    SFmAlarmDataT alm_data;
    std::string alarm;
    fm_uuid_t tmp_uuid;
    const char *alm_str;
    EFmErrorT rc;

    if (!PyArg_ParseTuple(args, "s", &alm_str)) {
        ERROR_LOG("Failed to parse args.");
        Py_RETURN_NONE;
    }

    alarm.assign(alm_str);
    // Deserialize the flat string representation built by the Python
    // caller into the C alarm structure.
    if (!fm_alarm_from_string(alarm, &alm_data)) {
        ERROR_LOG("Failed to convert string to alarm.");
        Py_RETURN_NONE;
    }

    rc = fm_set_fault(&alm_data, &tmp_uuid);
    if (rc == FM_ERR_OK) {
        return PyString_FromString(&(tmp_uuid[0]));
    }

    if (rc == FM_ERR_NOCONNECT){
        // when the fm-manager process has not been started by SM
        WARNING_LOG("Failed to connect to FM manager");
    } else {
        ERROR_LOG("Failed to generate an alarm: (%s) (%s)",
                  alm_data.alarm_id, alm_data.entity_instance_id);
    }
    Py_RETURN_NONE;
}
// Python: fm_core.get(filter_string) -> alarm string | None
// Look up a single alarm by an "alarm_id entity_instance_id" filter.
// Returns the serialized alarm on success; None when not found or on
// any error.
static PyObject * _fm_get(PyObject * self, PyObject *args) {

    const char *filter;
    std::string alm_str, filter_str;
    AlarmFilter af;
    SFmAlarmDataT ad;
    EFmErrorT rc;

    if (!PyArg_ParseTuple(args, "s", &filter)) {
        ERROR_LOG("Failed to parse args");
        Py_RETURN_NONE;
    }

    filter_str.assign(filter);
    // Parse the filter string into the (alarm_id, entity_instance_id)
    // pair used by the FM lookup API.
    if (!fm_alarm_filter_from_string(filter_str, &af)) {
        ERROR_LOG("Invalid alarm filter: (%s)", filter_str.c_str());
        Py_RETURN_NONE;
    }

    rc = fm_get_fault(&af,&ad);
    if (rc == FM_ERR_OK) {
        fm_alarm_to_string(&ad,alm_str);
        return PyString_FromString(alm_str.c_str());
    }

    if (rc == FM_ERR_ENTITY_NOT_FOUND) {
        // A miss is an expected outcome, so log at debug only.
        DEBUG_LOG("Alarm id (%s), Entity id:(%s) not found",
                  af.alarm_id, af.entity_instance_id);
    } else if (rc == FM_ERR_NOCONNECT) {
        WARNING_LOG("Failed to connect to FM manager");
    } else {
        ERROR_LOG("Failed to get alarm by filter: (%s) (%s), error code: (%d)",
                  af.alarm_id, af.entity_instance_id, rc);
    }
    Py_RETURN_NONE;
}
// Python: fm_core.get_by_aid(alarm_id, max=1000) -> list of alarm strings | None
// Fetch up to "max" alarms that match the given alarm id.  Returns a
// (possibly empty) list of serialized alarms, or None on error.
static PyObject * _fm_get_by_aid(PyObject * self, PyObject *args, PyObject* kwargs) {

    const char *aid;
    fm_alarm_id alm_id;
    unsigned int max = DEF_MAX_ALARMS;
    char* keywords[] = {"alarm_id", "max", (char*)NULL};

    memset(alm_id, 0 , sizeof(alm_id));

    // NOTE: "i" parses a signed int into an unsigned slot; a negative
    // "max" wraps to a huge value and fails the resize() below.
    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s|i", keywords, &aid, &max)) {
        ERROR_LOG("Failed to parse args");
        Py_RETURN_NONE;
    }
    strncpy(alm_id, aid, sizeof(alm_id)-1);

    if (max == 0) {
        // Nothing requested: return an empty list rather than taking the
        // address of an element of an empty vector (undefined behaviour).
        return PyList_New(0);
    }

    std::vector< SFmAlarmDataT > lst;
    try {
        lst.resize(max);
    } catch(...) {
        ERROR_LOG("Failed to allocate memory");
        Py_RETURN_NONE;
    }

    unsigned int max_alarms_to_get = max;
    EFmErrorT rc = fm_get_faults_by_id(&alm_id, &(lst[0]), &max_alarms_to_get);
    if (rc == FM_ERR_OK) {
        PyObject *__lst = PyList_New(0);
        if (__lst == NULL) {
            ERROR_LOG("Failed to allocate alarm list");
            Py_RETURN_NONE;
        }
        for ( size_t ix = 0 ; ix < max_alarms_to_get ; ++ix ) {
            std::string s;
            fm_alarm_to_string(&lst[ix],s);
            if (s.size() > 0) {
                PyObject *alm = PyString_FromString(s.c_str());
                if (alm == NULL) {
                    ERROR_LOG("Failed to create alarm string");
                    continue;
                }
                if (PyList_Append(__lst, alm) != 0) {
                    ERROR_LOG("Failed to append alarm to the list");
                }
                // PyList_Append takes its own reference; release ours so
                // the string is not leaked.
                Py_DECREF(alm);
            }
        }
        /* python will garbage collect if the reference count is correct
           (it should be 1 at this point) */
        return __lst;
    }

    if (rc == FM_ERR_ENTITY_NOT_FOUND) {
        DEBUG_LOG("No alarm found for alarm id (%s)", alm_id);
    } else if (rc == FM_ERR_NOCONNECT) {
        WARNING_LOG("Failed to connect to FM manager");
    } else {
        ERROR_LOG("Failed to get alarm list for alarm id (%s), error code: (%d)", alm_id, rc);
    }
    Py_RETURN_NONE;
}
// Python: fm_core.get_by_eid(entity_instance_id, max=1000)
//         -> list of alarm strings | None
// Fetch up to "max" alarms that match the given entity instance id.
// Returns a (possibly empty) list of serialized alarms, or None on error.
static PyObject * _fm_get_by_eid(PyObject * self, PyObject *args, PyObject* kwargs) {

    const char *eid;
    fm_ent_inst_t inst_id;
    std::vector< SFmAlarmDataT > lst;
    unsigned int max= DEF_MAX_ALARMS;
    char* keywords[] = {"entity_instance_id", "max", (char*)NULL};

    memset(inst_id, 0 , sizeof(inst_id));

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s|i", keywords, &eid, &max)) {
        ERROR_LOG("Failed to parse args");
        Py_RETURN_NONE;
    }
    strncpy(inst_id, eid ,sizeof(inst_id)-1);

    if (max == 0) {
        // Nothing requested: return an empty list rather than taking the
        // address of an element of an empty vector (undefined behaviour).
        return PyList_New(0);
    }

    try {
        lst.resize(max);
    } catch(...) {
        ERROR_LOG("Failed to allocate memory");
        Py_RETURN_NONE;
    }

    unsigned int max_alarms_to_get = max;
    EFmErrorT rc = fm_get_faults(&inst_id, &(lst[0]), &max_alarms_to_get);
    if (rc == FM_ERR_OK) {
        PyObject *__lst = PyList_New(0);
        if (__lst == NULL) {
            ERROR_LOG("Failed to allocate alarm list");
            Py_RETURN_NONE;
        }
        for ( size_t ix = 0; ix < max_alarms_to_get; ++ix ) {
            std::string s;
            fm_alarm_to_string(&lst[ix], s);
            if (s.size() > 0) {
                PyObject *alm = PyString_FromString(s.c_str());
                if (alm == NULL) {
                    ERROR_LOG("Failed to create alarm string");
                    continue;
                }
                if (PyList_Append(__lst, alm) != 0) {
                    ERROR_LOG("Failed to append alarm to the list");
                }
                // PyList_Append takes its own reference; release ours so
                // the string is not leaked.
                Py_DECREF(alm);
            }
        }
        /* python will garbage collect if the reference count is correct
           (it should be 1 at this point) */
        return __lst;
    }

    if (rc == FM_ERR_ENTITY_NOT_FOUND) {
        DEBUG_LOG("No alarm found for entity id (%s)", inst_id);
    } else if (rc == FM_ERR_NOCONNECT) {
        WARNING_LOG("Failed to connect to FM manager");
    } else {
        ERROR_LOG("Failed to get alarm list for entity id (%s), error code: (%d)", inst_id, rc);
    }
    Py_RETURN_NONE;
}
// Python: fm_core.clear(filter_string) -> bool
// Clear the single alarm selected by an "alarm_id entity_instance_id"
// filter.  Returns True on success; False when the alarm does not exist
// or on any error.
static PyObject * _fm_clear(PyObject * self, PyObject *args) {

    const char *filter;
    std::string alm_str, filter_str;
    AlarmFilter af;
    EFmErrorT rc;

    if (!PyArg_ParseTuple(args, "s", &filter)) {
        ERROR_LOG("Failed to parse args");
        Py_RETURN_FALSE;
    }

    filter_str.assign(filter);
    // Parse the filter string into the (alarm_id, entity_instance_id)
    // pair used by the FM clear API.
    if (!fm_alarm_filter_from_string(filter_str, &af)) {
        ERROR_LOG("Invalid alarm filter: (%s)", filter_str.c_str());
        Py_RETURN_FALSE;
    }

    rc = fm_clear_fault(&af);
    if (rc == FM_ERR_OK) {
        Py_RETURN_TRUE;
    }

    if (rc == FM_ERR_ENTITY_NOT_FOUND) {
        // Clearing an absent alarm is an expected no-op; debug only.
        DEBUG_LOG("No alarm found to clear: (%s) (%s)", af.alarm_id, af.entity_instance_id);
    } else if (rc == FM_ERR_NOCONNECT) {
        WARNING_LOG("Failed to connect to FM manager");
    } else {
        ERROR_LOG("Failed to clear alarm by filter: (%s) (%s), error code: (%d)",
                  af.alarm_id, af.entity_instance_id, rc);
    }
    Py_RETURN_FALSE;
}
// Python: fm_core.clear_all(entity_instance_id) -> bool
// Clear every alarm whose entity instance id matches the given prefix.
// Returns True on success, False on parse failure or API error.
static PyObject * _fm_clear_all(PyObject * self, PyObject *args) {

    const char *entity_arg;

    if (!PyArg_ParseTuple(args,"s", &entity_arg)) {
        ERROR_LOG("Failed to parse args");
        Py_RETURN_FALSE;
    }

    // Copy the argument into the fixed-size id buffer, always leaving a
    // terminating NUL in the last byte.
    fm_ent_inst_t entity_id;
    memset(entity_id, 0 , sizeof(entity_id));
    strncpy(entity_id, entity_arg, sizeof(entity_id)-1);

    EFmErrorT status = fm_clear_all(&entity_id);
    if (status != FM_ERR_OK) {
        ERROR_LOG("Failed to clear alarms with entity id (%s), error code: (%d)",
                  entity_id, status);
        Py_RETURN_FALSE;
    }
    Py_RETURN_TRUE;
}
// Method table for the fm_core extension module.  The last entry is the
// required NULL sentinel.
static PyMethodDef _methods [] = {
    { "set", _fm_set, METH_VARARGS, "Set or update an alarm" },
    { "get", _fm_get, METH_VARARGS, "Get alarms by filter" },
    { "clear", _fm_clear, METH_VARARGS, "Clear an alarm by filter" },
    { "clear_all", _fm_clear_all, METH_VARARGS,
      "Clear alarms that match the entity instance id"},
    { "get_by_aid", (PyCFunction)_fm_get_by_aid, METH_VARARGS | METH_KEYWORDS,
      "Get alarms by alarm id" },
    { "get_by_eid", (PyCFunction)_fm_get_by_eid, METH_VARARGS | METH_KEYWORDS,
      "Get alarms by entity instance id" },
    { NULL, NULL, 0, NULL }
};
// Module initialization entry point invoked by the Python 2 interpreter
// on "import fm_core"; registers the method table above.
PyMODINIT_FUNC initfm_core() {
    PyObject *m = Py_InitModule("fm_core", _methods);
    if (m == NULL){
        PySys_WriteStderr("Failed to initialize fm_core");
        return;
    }
}

View File

@ -0,0 +1,11 @@
# Build script for the "fm_core" C extension, which wraps the FM
# application C APIs for use from Python.
from distutils.core import setup, Extension

setup(name="fm_core", version="1.0",
      ext_modules=[Extension("fm_core", [
          "fm_python_mod_main.cpp"],
          # Link against libpq (PostgreSQL client) and libfmcommon, the
          # FM library providing the fm_* APIs used by the extension.
          libraries=[
              'pq',
              "fmcommon"],
          library_dirs=['.']
          )])

View File

@ -5,7 +5,7 @@ Summary: CGTS Platform Fault Manager Package
Name: fm-mgr
Version: 1.0
Release: %{tis_patch_ver}%{?_tis_dist}
License: Apache-2.0
License: windriver
Group: base
Packager: Wind River <info@windriver.com>
URL: unknown
@ -15,7 +15,7 @@ BuildRequires: systemd-devel
BuildRequires: libuuid-devel
%description
CGTS platform Fault Manager that serves the client
CGTS platform Fault Manager that serves the client
application fault management requests and raise/clear/update
alarms in the active alarm database.
@ -43,7 +43,6 @@ rm -rf $RPM_BUILD_ROOT
%defattr(-,root,root,-)
%doc LICENSE
%{local_bindir}/fmManager
%config(noreplace) %{_sysconfdir}/fm.conf
%_sysconfdir/init.d/fminit
%{_unitdir}/fminit.service
%config(noreplace) %{_sysconfdir}/logrotate.d/fm.logrotate

View File

@ -23,7 +23,6 @@ install_non_bb:
install -m 755 -d $(DEST_DIR)/etc/logrotate.d
install -m 755 -d $(DEST_DIR)/usr/local
install -m 755 -d $(DEST_DIR)/usr/local/bin
install -m 644 fm.conf $(DEST_DIR)/etc/fm.conf
install -m 755 fminit $(DEST_DIR)/etc/init.d/fminit
install -m 755 fmManager $(DEST_DIR)/usr/local/bin/fmManager
install -m 644 fm.logrotate $(DEST_DIR)/etc/logrotate.d/fm.logrotate

View File

@ -6,25 +6,20 @@
#include <stdio.h>
#include <stdlib.h>
#include <fmAPI.h>
#include <stdbool.h>
#include <unistd.h>
#include <signal.h>
#include <syslog.h>
#include <fmAPI.h>
#include <fmConfig.h>
#include <fmLog.h>
void sig_handler(int signo) {
int result = 0;
if (signo == SIGHUP){
result = setlogmask(LOG_UPTO (LOG_DEBUG));
if (result == LOG_UPTO (LOG_DEBUG)){
result = setlogmask(LOG_UPTO (LOG_INFO));
syslog(LOG_INFO, "Received SIGHUP, set log level from %d to LOG_INFO", result);
}else{
syslog(LOG_INFO, "Received SIGHUP, set log level from %d to LOG_DEBUG", result);
}
}
fm_get_config_paramters();
fmLoggingInit();
}
}
int main(int argc, char *argv[]) {

View File

@ -20,8 +20,7 @@
FMMGR_NAME="fmManager"
FMMGR="/usr/local/bin/${FMMGR_NAME}"
PIDFILE=/var/run/${FMMGR_NAME}.pid
CONFIGFILE=/etc/fm.conf
CONFIGFILE=/etc/fm/fm.conf
# Linux Standard Base (LSB) Error Codes
RETVAL=0

12
fm-rest-api/PKG-INFO Normal file
View File

@ -0,0 +1,12 @@
Metadata-Version: 1.1
Name: fm-rest-api
Version: 1.0
Summary: Fault Manager REST API
Home-page:
Author: Windriver
Author-email: info@windriver.com
License: windriver
Description: Fault Manager REST API
Platform: UNKNOWN

View File

@ -0,0 +1,2 @@
SRC_DIR="fm"
TIS_PATCH_VER=1

View File

@ -0,0 +1,96 @@
Summary: Fault Management OpenStack REST API
Name: fm-rest-api
Version: 1.0
Release: %{tis_patch_ver}%{?_tis_dist}
License: windriver
Group: base
Packager: Wind River <info@windriver.com>
URL: unknown
Source0: %{name}-%{version}.tar.gz
BuildRequires: python-setuptools
BuildRequires: python-oslo-config
BuildRequires: python-oslo-db
BuildRequires: python-oslo-log
BuildRequires: python-oslo-messaging
BuildRequires: python-oslo-middleware
Requires: python-eventlet
Requires: python-webob
Requires: python-paste
BuildRequires: systemd
%description
Fault Management OpenStack REST API Service
%define local_bindir /usr/bin/
%define local_initddir /etc/rc.d/init.d
%define pythonroot /usr/lib64/python2.7/site-packages
%define local_etc_pmond /etc/pmon.d/
%define debug_package %{nil}
%prep
%autosetup -n %{name}-%{version}
# Remove bundled egg-info
rm -rf *.egg-info
%build
echo "Start build"
export PBR_VERSION=%{version}
%{__python} setup.py build
PYTHONPATH=. oslo-config-generator --config-file=fm/config-generator.conf
%install
echo "Start install"
export PBR_VERSION=%{version}
%{__python} setup.py install --root=%{buildroot} \
--install-lib=%{pythonroot} \
--prefix=/usr \
--install-data=/usr/share \
--single-version-externally-managed
install -p -D -m 644 scripts/fm-api.service %{buildroot}%{_unitdir}/fm-api.service
install -d -m 755 %{buildroot}%{local_initddir}
install -p -D -m 755 scripts/fm-api %{buildroot}%{local_initddir}/fm-api
install -d -m 755 %{buildroot}%{local_etc_pmond}
install -p -D -m 644 fm-api-pmond.conf %{buildroot}%{local_etc_pmond}/fm-api.conf
# Install sql migration stuff that wasn't installed by setup.py
install -m 640 fm/db/sqlalchemy/migrate_repo/migrate.cfg %{buildroot}%{pythonroot}/fm/db/sqlalchemy/migrate_repo/migrate.cfg
# install default config files
cd %{_builddir}/%{name}-%{version} && oslo-config-generator --config-file fm/config-generator.conf --output-file %{_builddir}/%{name}-%{version}/fm.conf.sample
install -p -D -m 644 %{_builddir}/%{name}-%{version}/fm.conf.sample %{buildroot}%{_sysconfdir}/fm/fm.conf
%clean
echo "CLEAN CALLED"
rm -rf $RPM_BUILD_ROOT
%post
/bin/systemctl enable fm-api.service >/dev/null 2>&1
%files
%defattr(-,root,root,-)
%doc LICENSE
%{local_bindir}/*
%{local_initddir}/*
%{pythonroot}/fm/*
%{pythonroot}/fm-%{version}*.egg-info
%config(noreplace) %{_sysconfdir}/fm/fm.conf
# systemctl service files
%{_unitdir}/fm-api.service
# pmond config file
%{local_etc_pmond}/fm-api.conf

176
fm-rest-api/fm/LICENSE Executable file
View File

@ -0,0 +1,176 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

View File

@ -0,0 +1,10 @@
[process]
process = fm-api
pidfile = /var/run/fm-api.pid
script = /etc/init.d/fm-api
style = lsb ; ocf or lsb
severity = major ; minor, major, critical
restarts = 3 ; restarts before error assertion
interval = 5 ; number of seconds to wait between restarts
debounce = 20 ; number of seconds to wait before degrade clear

View File

@ -0,0 +1,5 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

View File

@ -0,0 +1,5 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

View File

@ -0,0 +1,85 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from oslo_service import service
from oslo_service import wsgi
from oslo_config import cfg
from oslo_log import log
import pecan
from fm.api import config
from fm.common import policy
from fm.common.i18n import _
CONF = cfg.CONF

LOG = log.getLogger(__name__)

# Process-wide service launcher created by serve(); wait() blocks on it.
_launcher = None
def get_pecan_config():
    """Load the pecan application configuration.

    The configuration dictionary lives in the fm.api.config module
    itself, so its source file is fed to pecan's config loader.
    """
    cfg_file = config.__file__.replace('.pyc', '.py')
    return pecan.configuration.conf_from_file(cfg_file)
def setup_app(config=None):
    """Create and return the pecan WSGI application.

    :param config: optional pecan configuration; when omitted, the
                   default configuration from get_pecan_config() is used.
    """
    policy.init()

    if not config:
        config = get_pecan_config()
    pecan.configuration.set_config(dict(config), overwrite=True)

    # 'root' is consumed here; the remaining app settings are passed
    # through to pecan as keyword arguments.
    app_conf = dict(config.app)
    root_controller = app_conf.pop('root')

    return pecan.make_app(
        root_controller,
        debug=CONF.debug,
        logging=getattr(config, 'logging', {}),
        force_canonical=getattr(config.app, 'force_canonical', True),
        guess_content_type_from_ext=False,
        **app_conf
    )
def load_paste_app(app_name=None):
    """Loads a WSGI app from a paste config file."""
    if app_name is None:
        app_name = cfg.CONF.prog
    return wsgi.Loader(cfg.CONF).load_app(app_name)
def app_factory(global_config, **local_conf):
    # Paste deploy factory entry point; arguments are accepted for the
    # paste interface but the app is built entirely from setup_app().
    return setup_app()
def serve(api_service, conf, workers=1):
    # Launch the API service via oslo.service.  Guarded by the module
    # global so it can only be called once per process.
    global _launcher
    if _launcher:
        raise RuntimeError(_('serve() can only be called once'))

    _launcher = service.launch(conf, api_service, workers=workers)
def wait():
    # Block until the launcher created by serve() terminates.
    _launcher.wait()

View File

@ -0,0 +1,67 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import sys
import pbr.version
from oslo_config import cfg
from oslo_log import log as logging
from keystoneauth1 import loading as ks_loading
from fm.api import hooks
LOG = logging.getLogger(__name__)

# Option group for locating the sysinv (platform) service via the
# keystone service catalog.
sysinv_group = cfg.OptGroup(
    'sysinv',
    title='Sysinv Options',
    help="Configuration options for the platform service")

sysinv_opts = [
    cfg.StrOpt('catalog_info',
               default='platform:sysinv:internalURL',
               help="Service catalog Look up info."),
    cfg.StrOpt('os_region_name',
               default='RegionOne',
               help="Region name of this node. It is used for catalog lookup"),
]

# Version information sourced from pbr packaging metadata.
version_info = pbr.version.VersionInfo('fm')

# Pecan Application Configurations
app = {
    'root': 'fm.api.controllers.root.RootController',
    'modules': ['fm.api'],
    'hooks': [
        hooks.ContextHook(),
        hooks.DBHook(),
    ],
    # Routes reachable without authentication (root and version listing).
    'acl_public_routes': [
        '/',
        '/v1',
    ],
}
def init(args, **kwargs):
    # Register the sysinv option group, keystone session options and
    # oslo.log options, then parse the command line / config files for
    # the 'fm' project.
    cfg.CONF.register_group(sysinv_group)
    cfg.CONF.register_opts(sysinv_opts, group=sysinv_group)
    ks_loading.register_session_conf_options(cfg.CONF,
                                             sysinv_group.name)
    logging.register_options(cfg.CONF)

    cfg.CONF(args=args, project='fm',
             version='%%(prog)s %s' % version_info.release_string(),
             **kwargs)
def setup_logging():
    """Sets up the logging options for a log with supplied name."""
    logging.setup(cfg.CONF, "fm")
    LOG.debug("Logging enabled!")
    LOG.debug("%(prog)s version %(version)s",
              {'prog': sys.argv[0],
               'version': version_info.release_string()})
    LOG.debug("command line: %s", " ".join(sys.argv))

View File

@ -0,0 +1,5 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

View File

@ -0,0 +1,110 @@
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from fm.api.controllers import v1
from fm.api.controllers.v1 import base
from fm.api.controllers.v1 import link
ID_VERSION = 'v1'
def expose(*args, **kwargs):
    """Ensure that only JSON, and not XML, is supported."""
    kwargs.setdefault('rest_content_types', ('json',))
    return wsme_pecan.wsexpose(*args, **kwargs)
class Version(base.APIBase):
    """An API version representation.

    This class represents an API version, including the minimum and
    maximum minor versions that are supported within the major version.
    """

    id = wtypes.text
    """The ID of the (major) version, also acts as the release number"""

    links = [link.Link]
    """A Link that point to a specific version of the API"""

    @classmethod
    def convert(cls, vid):
        # Build a Version resource for the given version id with a
        # self-link derived from the current request's host URL.
        version = Version()
        version.id = vid
        version.links = [link.Link.make_link('self', pecan.request.host_url,
                                             vid, '', bookmark=True)]
        return version
class Root(base.APIBase):
    """The root resource of the API: name, description and versions."""

    name = wtypes.text
    """The name of the API"""

    description = wtypes.text
    """Some information about this API"""

    versions = [Version]
    """Links to all the versions available in this API"""

    default_version = Version
    """A link to the default version of the API"""

    @staticmethod
    def convert():
        # Build the root document; v1 is currently the only (and hence
        # default) version.
        root = Root()
        root.name = "Fault Management API"
        root.description = ("Fault Management is an OpenStack project which "
                            "provides REST API services for alarms and logs.")
        root.default_version = Version.convert(ID_VERSION)
        root.versions = [root.default_version]
        return root
class RootController(rest.RestController):

    _versions = [ID_VERSION]
    """All supported API versions"""

    _default_version = ID_VERSION
    """The default API version"""

    v1 = v1.Controller()

    @expose(Root)
    def get(self):
        # NOTE: The reason why convert() it's being called for every
        #       request is because we need to get the host url from
        #       the request object to make the links.
        return Root.convert()

    @pecan.expose()
    def _route(self, args):
        """Overrides the default routing behavior.

        It redirects the request to the default version of the FM API
        if the version number is not specified in the url.
        """
        # NOTE(review): assumes pecan always supplies a non-empty args
        # list here (args[0] would raise IndexError otherwise) -- confirm.
        if args[0] and args[0] not in self._versions:
            args = [self._default_version] + args
        return super(RootController, self)._route(args)

View File

@ -0,0 +1,107 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import pecan
import wsmeext.pecan as wsme_pecan
from pecan import rest
from wsme import types as wtypes
from fm.api.controllers.v1 import link
from fm.api.controllers.v1 import alarm
from fm.api.controllers.v1 import base
from fm.api.controllers.v1 import event_log
from fm.api.controllers.v1 import event_suppression
class MediaType(base.APIBase):
    """A media type representation."""

    # Base MIME type (e.g. 'application/json').
    base = wtypes.text
    # Vendor-specific MIME type for this API version.
    type = wtypes.text

    def __init__(self, base, type):
        self.base = base
        self.type = type
class V1(base.APIBase):
    """The representation of the version 1 of the API."""

    id = wtypes.text
    "The ID of the version, also acts as the release number"

    media_types = [MediaType]
    "An array of supported media types for this version"

    links = [link.Link]
    "Links that point to a specific URL for this version and documentation"

    alarms = [link.Link]
    "Links to the alarm resource"

    event_log = [link.Link]
    "Links to the event_log resource"

    event_suppression = [link.Link]
    "Links to the event_suppression resource"

    @classmethod
    def convert(cls):
        """Build the v1 resource map with self/bookmark links per resource.

        Fix: the first parameter of this @classmethod was named ``self``;
        it is renamed to ``cls`` per Python convention (callers are
        unaffected -- it is always invoked as ``V1.convert()``).
        """
        v1 = V1()
        v1.id = "v1"
        v1.links = [link.Link.make_link('self', pecan.request.host_url,
                                        'v1', '', bookmark=True),
                    link.Link.make_link('describedby',
                                        'http://www.windriver.com',
                                        'developer/fm/dev',
                                        'api-spec-v1.html',
                                        bookmark=True, type='text/html')
                    ]
        v1.media_types = [MediaType('application/json',
                                    'application/vnd.openstack.fm.v1+json')]
        v1.alarms = [link.Link.make_link('self', pecan.request.host_url,
                                         'alarms', ''),
                     link.Link.make_link('bookmark',
                                         pecan.request.host_url,
                                         'alarms', '',
                                         bookmark=True)
                     ]
        v1.event_log = [link.Link.make_link('self', pecan.request.host_url,
                                            'event_log', ''),
                        link.Link.make_link('bookmark',
                                            pecan.request.host_url,
                                            'event_log', '',
                                            bookmark=True)
                        ]
        v1.event_suppression = [link.Link.make_link('self',
                                                    pecan.request.host_url,
                                                    'event_suppression', ''),
                                link.Link.make_link('bookmark',
                                                    pecan.request.host_url,
                                                    'event_suppression', '',
                                                    bookmark=True)
                                ]
        return v1
class Controller(rest.RestController):
    """Version 1 API controller root."""

    # Sub-controllers for each top-level v1 resource.
    alarms = alarm.AlarmController()
    event_log = event_log.EventLogController()
    event_suppression = event_suppression.EventSuppressionController()

    @wsme_pecan.wsexpose(V1)
    def get(self):
        # Rebuilt per request so the links carry the request's host URL.
        return V1.convert()


__all__ = ('Controller',)

View File

@ -0,0 +1,341 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import datetime
import pecan
from pecan import rest
import wsme
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from oslo_log import log
from fm_api import fm_api
from fm.api.controllers.v1 import base
from fm.api.controllers.v1 import collection
from fm.api.controllers.v1 import link
from fm.api.controllers.v1 import types
from fm.api.controllers.v1 import utils as api_utils
from fm.common import exceptions
from fm.common import constants
from fm import objects
from fm.api.controllers.v1.query import Query
from fm.api.controllers.v1.sysinv import cgtsclient
from fm_api import constants as fm_constants
LOG = log.getLogger(__name__)
class AlarmPatchType(types.JsonPatchType):
    # No alarm-specific patch constraints; inherits the generic
    # JSON-patch validation behavior.
    pass
class Alarm(base.APIBase):
    """API representation of an alarm.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of
    an alarm.
    """

    uuid = types.uuid
    "The UUID of the alarm"

    alarm_id = wsme.wsattr(wtypes.text, mandatory=True)
    "structured id for the alarm; AREA_ID ID; 300-001"

    alarm_state = wsme.wsattr(wtypes.text, mandatory=True)
    "The state of the alarm"

    entity_type_id = wtypes.text
    "The type of the object raising alarm"

    entity_instance_id = wsme.wsattr(wtypes.text, mandatory=True)
    "The original instance information of the object raising alarm"

    timestamp = datetime.datetime
    "The time in UTC at which the alarm state is last updated"

    severity = wsme.wsattr(wtypes.text, mandatory=True)
    "The severity of the alarm"

    reason_text = wtypes.text
    "The reason why the alarm is raised"

    alarm_type = wsme.wsattr(wtypes.text, mandatory=True)
    "The type of the alarm"

    probable_cause = wsme.wsattr(wtypes.text, mandatory=True)
    "The probable cause of the alarm"

    proposed_repair_action = wtypes.text
    "The action to clear the alarm"

    service_affecting = wtypes.text
    "Whether the alarm affects the service"

    suppression = wtypes.text
    "'allowed' or 'not-allowed'"

    suppression_status = wtypes.text
    "'suppressed' or 'unsuppressed'"

    mgmt_affecting = wtypes.text
    "Whether the alarm prevents software management actions"

    degrade_affecting = wtypes.text
    "Whether the alarm prevents filesystem resize actions"

    links = [link.Link]
    "A list containing a self link and associated alarm links"

    def __init__(self, **kwargs):
        # Mirror the object-layer field set onto this API object.
        self.fields = objects.alarm.fields.keys()
        for k in self.fields:
            setattr(self, k, kwargs.get(k))

    @classmethod
    def convert_with_links(cls, rpc_ialarm, expand=True):
        """Build an Alarm API object from a DB/RPC alarm record.

        The DB layer may return either a bare alarm object or a tuple of
        (alarm, suppression_status, mgmt_affecting, degrade_affecting).

        :param rpc_ialarm: alarm record or tuple as described above
        :param expand: when False, collapse to the summary field subset
        """
        if isinstance(rpc_ialarm, tuple):
            alarms = rpc_ialarm[0]
            suppress_status = rpc_ialarm[constants.DB_SUPPRESS_STATUS]
            mgmt_affecting = rpc_ialarm[constants.DB_MGMT_AFFECTING]
            degrade_affecting = rpc_ialarm[constants.DB_DEGRADE_AFFECTING]
        else:
            alarms = rpc_ialarm
            suppress_status = rpc_ialarm.suppression_status
            mgmt_affecting = rpc_ialarm.mgmt_affecting
            degrade_affecting = rpc_ialarm.degrade_affecting
        # Booleans are rendered as their string form in the API.
        alarms['service_affecting'] = str(alarms['service_affecting'])
        alarms['suppression'] = str(alarms['suppression'])
        alm = Alarm(**alarms.as_dict())
        if not expand:
            # Fix: 'mgmt_affecting' previously carried a trailing space
            # ('mgmt_affecting '), which never matched the real field name,
            # so mgmt_affecting was wrongly unset from collapsed responses.
            alm.unset_fields_except(['uuid', 'alarm_id', 'entity_instance_id',
                                     'severity', 'timestamp', 'reason_text',
                                     'mgmt_affecting', 'degrade_affecting'])
        alm.entity_instance_id = \
            api_utils.make_display_id(alm.entity_instance_id, replace=False)
        alm.suppression_status = str(suppress_status)
        alm.mgmt_affecting = str(
            not fm_api.FaultAPIs.alarm_allowed(alm.severity, mgmt_affecting))
        alm.degrade_affecting = str(
            not fm_api.FaultAPIs.alarm_allowed(alm.severity, degrade_affecting))
        return alm
class AlarmCollection(collection.Collection):
    """API representation of a collection of alarm."""

    alarms = [Alarm]
    "A list containing alarm objects"

    def __init__(self, **kwargs):
        self._type = 'alarms'

    @classmethod
    def convert_with_links(cls, ialm, limit, url=None,
                           expand=False, **kwargs):
        """Convert DB alarm rows into an API collection, dropping masked ones."""
        def _visible(entry):
            # Rows may arrive as (alarm, suppress, mgmt, degrade) tuples.
            record = entry[0] if isinstance(entry, tuple) else entry
            return str(record['masked']) != 'True'

        unmasked = [entry for entry in ialm if _visible(entry)]
        coll = AlarmCollection()
        coll.alarms = [Alarm.convert_with_links(entry, expand)
                       for entry in unmasked]
        coll.next = coll.get_next(limit, url=url, **kwargs)
        return coll
LOCK_NAME = 'AlarmController'
class AlarmSummary(base.APIBase):
    """API representation of an alarm summary object."""

    critical = wsme.wsattr(int, mandatory=True)
    "The count of critical alarms"

    major = wsme.wsattr(int, mandatory=True)
    "The count of major alarms"

    minor = wsme.wsattr(int, mandatory=True)
    "The count of minor alarms"

    warnings = wsme.wsattr(int, mandatory=True)
    "The count of warnings"

    status = wsme.wsattr(wtypes.text, mandatory=True)
    "The status of the system"

    system_uuid = wsme.wsattr(types.uuid, mandatory=True)
    "The UUID of the system (for distributed cloud use)"

    @classmethod
    def convert_with_links(cls, ialm_sum, uuid):
        """Build a summary from per-severity counts plus the system uuid.

        :param ialm_sum: dict keyed by FM severity constants and 'status'
        :param uuid: UUID of the system the counts belong to
        """
        summary = AlarmSummary()
        summary.critical = ialm_sum[fm_constants.FM_ALARM_SEVERITY_CRITICAL]
        summary.major = ialm_sum[fm_constants.FM_ALARM_SEVERITY_MAJOR]
        summary.minor = ialm_sum[fm_constants.FM_ALARM_SEVERITY_MINOR]
        summary.warnings = ialm_sum[fm_constants.FM_ALARM_SEVERITY_WARNING]
        summary.status = ialm_sum['status']
        summary.system_uuid = uuid
        return summary
class AlarmController(rest.RestController):
    """REST controller for alarm."""

    _custom_actions = {
        'detail': ['GET'],
        'summary': ['GET'],
    }

    def _get_alarm_summary(self, include_suppress):
        """Count unmasked alarms by severity and derive a system status."""
        kwargs = {}
        kwargs["include_suppress"] = include_suppress
        ialm = pecan.request.dbapi.alarm_get_all(**kwargs)
        ialm_counts = {fm_constants.FM_ALARM_SEVERITY_CRITICAL: 0,
                       fm_constants.FM_ALARM_SEVERITY_MAJOR: 0,
                       fm_constants.FM_ALARM_SEVERITY_MINOR: 0,
                       fm_constants.FM_ALARM_SEVERITY_WARNING: 0}
        # filter masked alarms and sum by severity
        for a in ialm:
            ialm_instance = a[0]
            if str(ialm_instance['masked']) != 'True':
                if ialm_instance['severity'] in ialm_counts:
                    ialm_counts[ialm_instance['severity']] += 1
        # Generate the status: critical outranks degraded (major/minor).
        status = fm_constants.FM_ALARM_OK_STATUS
        if (ialm_counts[fm_constants.FM_ALARM_SEVERITY_MAJOR] > 0) or \
                (ialm_counts[fm_constants.FM_ALARM_SEVERITY_MINOR] > 0):
            status = fm_constants.FM_ALARM_DEGRADED_STATUS
        if ialm_counts[fm_constants.FM_ALARM_SEVERITY_CRITICAL] > 0:
            status = fm_constants.FM_ALARM_CRITICAL_STATUS
        ialm_counts['status'] = status
        # NOTE(review): assumes sysinv reports at least one system; an empty
        # list would raise IndexError here -- confirm with sysinv contract.
        system = cgtsclient(pecan.request.context).isystem.list()[0]
        uuid = system.uuid
        return AlarmSummary.convert_with_links(ialm_counts, uuid)

    def _get_alarm_collection(self, marker, limit, sort_key, sort_dir,
                              expand=False, resource_url=None,
                              q=None, include_suppress=False):
        """Fetch alarms (paged via marker, or filtered via q) as a collection."""
        limit = api_utils.validate_limit(limit)
        sort_dir = api_utils.validate_sort_dir(sort_dir)
        # Allow a comma-separated list of sort keys.
        if isinstance(sort_key, str) and ',' in sort_key:
            sort_key = sort_key.split(',')
        kwargs = {}
        if q is not None:
            # Only equality filters are supported here.
            for i in q:
                if i.op == 'eq':
                    kwargs[i.field] = i.value
        kwargs["include_suppress"] = include_suppress
        if marker:
            marker_obj = objects.alarm.get_by_uuid(pecan.request.context,
                                                   marker)
            ialm = pecan.request.dbapi.alarm_get_list(
                limit, marker_obj,
                sort_key=sort_key,
                sort_dir=sort_dir,
                include_suppress=include_suppress)
        else:
            kwargs['limit'] = limit
            ialm = pecan.request.dbapi.alarm_get_all(**kwargs)
        return AlarmCollection.convert_with_links(ialm, limit,
                                                  url=resource_url,
                                                  expand=expand,
                                                  sort_key=sort_key,
                                                  sort_dir=sort_dir)

    @wsme_pecan.wsexpose(AlarmCollection, [Query],
                         types.uuid, int, wtypes.text, wtypes.text, bool)
    def get_all(self, q=[], marker=None, limit=None, sort_key='id',
                sort_dir='asc', include_suppress=False):
        """Retrieve a list of alarm.

        :param marker: pagination marker for large data sets.
        :param limit: maximum number of resources to return in a single result.
        :param sort_key: column to sort results by. Default: id.
        :param sort_dir: direction to sort. "asc" or "desc". Default: asc.
        :param include_suppress: filter on suppressed alarms. Default: False
        """
        return self._get_alarm_collection(marker, limit, sort_key,
                                          sort_dir, q=q,
                                          include_suppress=include_suppress)

    @wsme_pecan.wsexpose(AlarmCollection, types.uuid, int,
                         wtypes.text, wtypes.text)
    def detail(self, marker=None, limit=None, sort_key='id', sort_dir='asc'):
        """Retrieve a list of alarm with detail.

        :param marker: pagination marker for large data sets.
        :param limit: maximum number of resources to return in a single result.
        :param sort_key: column to sort results by. Default: id.
        :param sort_dir: direction to sort. "asc" or "desc". Default: asc.
        """
        # /detail should only work against collections
        parent = pecan.request.path.split('/')[:-1][-1]
        if parent != "alarm":
            raise exceptions.HTTPNotFound
        expand = True
        resource_url = '/'.join(['alarm', 'detail'])
        return self._get_alarm_collection(marker, limit, sort_key, sort_dir,
                                          expand, resource_url)

    @wsme_pecan.wsexpose(Alarm, wtypes.text)
    def get_one(self, id):
        """Retrieve information about the given alarm.

        :param id: UUID of an alarm.
        """
        rpc_ialarm = objects.alarm.get_by_uuid(
            pecan.request.context, id)
        # Masked alarms are hidden from the API as if they did not exist.
        if str(rpc_ialarm['masked']) == 'True':
            raise exceptions.HTTPNotFound
        return Alarm.convert_with_links(rpc_ialarm)

    @wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
    def delete(self, id):
        """Delete an alarm.

        :param id: uuid of an alarm.
        """
        pecan.request.dbapi.alarm_destroy(id)

    @wsme_pecan.wsexpose(AlarmSummary, bool)
    def summary(self, include_suppress=False):
        """Retrieve a summary of alarms.

        :param include_suppress: filter on suppressed alarms. Default: False
        """
        return self._get_alarm_summary(include_suppress)

View File

@ -0,0 +1,131 @@
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import datetime
import functools
from webob import exc
import wsme
from wsme import types as wtypes
from oslo_utils._i18n import _
class APIBase(wtypes.Base):
    """Common base for API representations with audit timestamps."""

    created_at = wsme.wsattr(datetime.datetime, readonly=True)
    """The time in UTC at which the object is created"""

    updated_at = wsme.wsattr(datetime.datetime, readonly=True)
    """The time in UTC at which the object is updated"""

    def as_dict(self):
        """Render this object as a dict of its fields."""
        # NOTE(review): relies on subclasses defining ``self.fields``.
        result = {}
        for name in self.fields:
            if hasattr(self, name):
                value = getattr(self, name)
                if value != wsme.Unset:
                    result[name] = value
        return result

    def unset_fields_except(self, except_list=None):
        """Unset fields so they don't appear in the message body.

        :param except_list: A list of fields that won't be touched.
        """
        keep = except_list if except_list is not None else []
        for name in self.as_dict():
            if name not in keep:
                setattr(self, name, wsme.Unset)

    @classmethod
    def from_rpc_object(cls, m, fields=None):
        """Convert a RPC object to an API object."""
        obj_dict = m.as_dict()
        # Unset non-required fields so they do not appear
        # in the message body
        if fields:
            for key in list(obj_dict.keys()):
                if key not in fields:
                    obj_dict[key] = wsme.Unset
        return cls(**obj_dict)
@functools.total_ordering
class Version(object):
    """API Version object."""

    string = 'X-OpenStack-FM-API-Version'
    """HTTP Header string carrying the requested version"""

    min_string = 'X-OpenStack-FM-API-Minimum-Version'
    """HTTP response header"""

    max_string = 'X-OpenStack-FM-API-Maximum-Version'
    """HTTP response header"""

    def __init__(self, headers, default_version, latest_version):
        """Create an API Version object from the supplied headers.

        :param headers: webob headers
        :param default_version: version to use if not specified in headers
        :param latest_version: version to use if latest is requested
        :raises: webob.HTTPNotAcceptable
        """
        (self.major, self.minor) = Version.parse_headers(
            headers, default_version, latest_version)

    def __repr__(self):
        return '%s.%s' % (self.major, self.minor)

    @staticmethod
    def parse_headers(headers, default_version, latest_version):
        """Determine the API version requested based on the headers supplied.

        :param headers: webob headers
        :param default_version: version to use if not specified in headers
        :param latest_version: version to use if latest is requested
        :returns: a tuple of (major, minor) version numbers
        :raises: webob.HTTPNotAcceptable
        """
        version_str = headers.get(Version.string, default_version)
        if version_str.lower() == 'latest':
            parse_str = latest_version
        else:
            parse_str = version_str
        try:
            version = tuple(int(i) for i in parse_str.split('.'))
        except ValueError:
            # Non-numeric component: fall through to the length check,
            # which rejects anything that is not exactly major.minor.
            version = ()
        if len(version) != 2:
            raise exc.HTTPNotAcceptable(_(
                "Invalid value for %s header") % Version.string)
        return version

    def __gt__(self, other):
        # total_ordering derives the remaining comparisons from this + __eq__.
        return (self.major, self.minor) > (other.major, other.minor)

    def __eq__(self, other):
        return (self.major, self.minor) == (other.major, other.minor)

    def __ne__(self, other):
        return not self.__eq__(other)

View File

@ -0,0 +1,58 @@
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import pecan
from wsme import types as wtypes
from fm.api.controllers.v1 import base
from fm.api.controllers.v1 import link
class Collection(base.APIBase):
    """Base class for paginated API collections."""

    next = wtypes.text
    "A link to retrieve the next subset of the collection"

    @property
    def collection(self):
        # Subclasses store their items under the attribute named by _type.
        return getattr(self, self._type)

    def has_next(self, limit):
        """Return whether collection has more items."""
        # NOTE(review): returns 0 (falsy int) for an empty collection rather
        # than strictly False; callers only use it in boolean context.
        return len(self.collection) and len(self.collection) == limit

    def get_next(self, limit, url=None, **kwargs):
        """Return a link to the next subset of the collection."""
        if not self.has_next(limit):
            return wtypes.Unset
        resource_url = url or self._type
        # Serialize extra query arguments ahead of the paging parameters;
        # the last item's uuid acts as the next-page marker.
        q_args = ''.join(['%s=%s&' % (key, kwargs[key]) for key in kwargs])
        next_args = '?%(args)slimit=%(limit)d&marker=%(marker)s' % {
            'args': q_args, 'limit': limit,
            'marker': self.collection[-1].uuid}
        return link.Link.make_link('next', pecan.request.host_url,
                                   resource_url, next_args).href

View File

@ -0,0 +1,292 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import json
import datetime
from oslo_utils import timeutils
from oslo_log import log
import pecan
from pecan import rest
import wsme
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from fm import objects
from fm.api.controllers.v1 import utils
from fm.api.controllers.v1 import base
from fm.api.controllers.v1 import collection
from fm.api.controllers.v1 import link
from fm.api.controllers.v1.query import Query
from fm.api.controllers.v1 import types
from fm.common import exceptions
from fm.common.i18n import _
LOG = log.getLogger(__name__)
def prettyDict(data):
    """Return *data* serialized as pretty-printed, key-sorted JSON.

    Fix: the parameter was named ``dict``, shadowing the builtin; renamed
    to ``data`` (existing callers invoke it positionally).
    """
    output = json.dumps(data, sort_keys=True, indent=4)
    return output
class EventLogPatchType(types.JsonPatchType):
    # No event-log-specific patch constraints; inherits the generic
    # JSON-patch validation behavior.
    pass
class EventLog(base.APIBase):
    """API representation of an event log.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of
    a event_log.
    """

    uuid = types.uuid
    "The UUID of the event_log"

    event_log_id = wsme.wsattr(wtypes.text, mandatory=True)
    "structured id for the event log; AREA_ID ID; 300-001"

    state = wsme.wsattr(wtypes.text, mandatory=True)
    "The state of the event"

    entity_type_id = wtypes.text
    "The type of the object event log"

    entity_instance_id = wsme.wsattr(wtypes.text, mandatory=True)
    "The original instance information of the object creating event log"

    timestamp = datetime.datetime
    "The time in UTC at which the event log is generated"

    severity = wsme.wsattr(wtypes.text, mandatory=True)
    "The severity of the log"

    reason_text = wtypes.text
    "The reason why the log is generated"

    event_log_type = wsme.wsattr(wtypes.text, mandatory=True)
    "The type of the event log"

    probable_cause = wsme.wsattr(wtypes.text, mandatory=True)
    "The probable cause of the event log"

    proposed_repair_action = wtypes.text
    "The action to clear the alarm"

    service_affecting = wtypes.text
    "Whether the log affects the service"

    suppression = wtypes.text
    "'allowed' or 'not-allowed'"

    suppression_status = wtypes.text
    "'suppressed' or 'unsuppressed'"

    links = [link.Link]
    "A list containing a self link and associated event links"

    def __init__(self, **kwargs):
        # Mirror the object-layer field set onto this API object.
        self.fields = objects.event_log.fields.keys()
        for k in self.fields:
            setattr(self, k, kwargs.get(k))

    @classmethod
    def convert_with_links(cls, rpc_event_log, expand=True):
        """Build an EventLog API object from a DB/RPC record.

        The DB layer may return either a bare record or a
        (record, suppression_status) tuple.
        """
        if isinstance(rpc_event_log, tuple):
            ievent_log = rpc_event_log[0]
            suppress_status = rpc_event_log[1]
        else:
            ievent_log = rpc_event_log
            suppress_status = rpc_event_log.suppression_status
        # Booleans are rendered as their string form in the API.
        ievent_log['service_affecting'] = str(ievent_log['service_affecting'])
        ievent_log['suppression'] = str(ievent_log['suppression'])
        ilog = EventLog(**ievent_log.as_dict())
        if not expand:
            ilog.unset_fields_except(['uuid', 'event_log_id', 'entity_instance_id',
                                      'severity', 'timestamp', 'reason_text', 'state'])
        ilog.entity_instance_id = \
            utils.make_display_id(ilog.entity_instance_id, replace=False)
        ilog.suppression_status = str(suppress_status)
        return ilog
def _getEventType(alarms=False, logs=False):
    """Map the alarms/logs filter flags to an event-type selector.

    Both flags set or both clear selects everything; otherwise the
    single set flag selects its own kind.
    """
    if bool(alarms) == bool(logs):
        return "ALL"
    return "LOG" if logs else "ALARM"
class EventLogCollection(collection.Collection):
    """API representation of a collection of event_log."""

    event_log = [EventLog]
    "A list containing event_log objects"

    def __init__(self, **kwargs):
        self._type = 'event_log'

    @classmethod
    def convert_with_links(cls, ilog, limit=None, url=None,
                           expand=False, **kwargs):
        """Convert DB event-log rows into an API collection."""
        coll = EventLogCollection()
        coll.event_log = [EventLog.convert_with_links(entry, expand)
                          for entry in list(ilog)]
        coll.next = coll.get_next(limit, url=url, **kwargs)
        return coll
def _handle_bad_input_date(f):
    """
    A decorator that executes function f and returns
    a more human readable error message on a SQL date exception
    """
    import functools
    import re

    # Patterns that extract the offending date from the timestamp
    # errors raised by the database layer.
    bad_date_patterns = [
        ".*date/time field value out of range: \"(.*)\".*LINE",
        ".*invalid input syntax for type timestamp: \"(.*)\".*",
        ".*timestamp out of range: \"(.*)\".*"]

    # Fix: preserve f's name/docstring so routing and introspection see
    # the wrapped function, not 'date_handler_wrapper'.
    @functools.wraps(f)
    def date_handler_wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as e:
            e_str = "{}".format(e)
            for r in bad_date_patterns:
                p = re.compile(r, re.DOTALL)
                m = p.match(e_str)
                if m and len(m.groups()) > 0:
                    bad_date = m.group(1)
                    raise wsme.exc.ClientSideError(_(
                        "Invalid date '{}' specified".format(bad_date)))
            # Not a recognized date error: propagate unchanged.
            raise
    return date_handler_wrapper
class EventLogController(rest.RestController):
    """REST controller for eventlog."""

    _custom_actions = {
        'detail': ['GET'],
    }

    @_handle_bad_input_date
    def _get_eventlog_collection(self, marker, limit, sort_key, sort_dir,
                                 expand=False, resource_url=None,
                                 q=None, alarms=False, logs=False,
                                 include_suppress=False):
        """Fetch event logs (paged via marker, or filtered via q)."""
        if limit and limit < 0:
            raise wsme.exc.ClientSideError(_("Limit must be positive"))
        sort_dir = utils.validate_sort_dir(sort_dir)
        kwargs = {}
        if q is not None:
            for i in q:
                if i.op == 'eq':
                    # Normalize start/end timestamps to naive UTC ISO format
                    # before handing them to the DB layer.
                    if i.field == 'start' or i.field == 'end':
                        val = timeutils.normalize_time(
                            timeutils.parse_isotime(i.value)
                            .replace(tzinfo=None))
                        i.value = val.isoformat()
                    kwargs[i.field] = i.value
        evtType = _getEventType(alarms, logs)
        kwargs["evtType"] = evtType
        kwargs["include_suppress"] = include_suppress
        if marker:
            marker_obj = objects.event_log.get_by_uuid(pecan.request.context,
                                                       marker)
            ilog = pecan.request.dbapi.event_log_get_list(
                limit, marker_obj,
                sort_key=sort_key,
                sort_dir=sort_dir,
                evtType=evtType,
                include_suppress=include_suppress)
        else:
            kwargs['limit'] = limit
            ilog = pecan.request.dbapi.event_log_get_all(**kwargs)
        return EventLogCollection.convert_with_links(ilog, limit,
                                                     url=resource_url,
                                                     expand=expand,
                                                     sort_key=sort_key,
                                                     sort_dir=sort_dir)

    @wsme_pecan.wsexpose(EventLogCollection, [Query],
                         types.uuid, int, wtypes.text, wtypes.text,
                         bool, bool, bool)
    def get_all(self, q=[], marker=None, limit=None, sort_key='timestamp',
                sort_dir='desc', alarms=False, logs=False,
                include_suppress=False):
        """Retrieve a list of event_log.

        :param marker: pagination marker for large data sets.
        :param limit: maximum number of resources to return in a single result.
        :param sort_key: column to sort results by. Default: id.
        :param sort_dir: direction to sort. "asc" or "desc". Default: asc.
        :param alarms: filter on alarms. Default: False
        :param logs: filter on logs. Default: False
        :param include_suppress: filter on suppressed alarms. Default: False
        """
        return self._get_eventlog_collection(marker, limit, sort_key,
                                             sort_dir, q=q, alarms=alarms,
                                             logs=logs,
                                             include_suppress=include_suppress)

    @wsme_pecan.wsexpose(EventLogCollection, types.uuid, int,
                         wtypes.text, wtypes.text, bool, bool)
    def detail(self, marker=None, limit=None, sort_key='id', sort_dir='asc',
               alarms=False, logs=False):
        """Retrieve a list of event_log with detail.

        :param marker: pagination marker for large data sets.
        :param limit: maximum number of resources to return in a single result.
        :param sort_key: column to sort results by. Default: id.
        :param sort_dir: direction to sort. "asc" or "desc". Default: asc.
        :param alarms: filter on alarms. Default: False
        :param logs: filter on logs. Default: False
        """
        # /detail should only work against collections
        parent = pecan.request.path.split('/')[:-1][-1]
        if parent != "event_log":
            raise exceptions.HTTPNotFound
        expand = True
        resource_url = '/'.join(['event_log', 'detail'])
        return self._get_eventlog_collection(marker, limit, sort_key, sort_dir,
                                             expand, resource_url, None,
                                             alarms, logs)

    @wsme_pecan.wsexpose(EventLog, wtypes.text)
    def get_one(self, id):
        """Retrieve information about the given event_log.

        :param id: UUID of an event_log.
        """
        rpc_ilog = objects.event_log.get_by_uuid(
            pecan.request.context, id)
        return EventLog.convert_with_links(rpc_ilog)

View File

@ -0,0 +1,215 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import pecan
from pecan import rest
import wsme
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from oslo_log import log
from fm import objects
from fm.api.controllers.v1 import base
from fm.api.controllers.v1 import collection
from fm.api.controllers.v1 import link
from fm.api.controllers.v1.query import Query
from fm.api.controllers.v1 import types
from fm.api.controllers.v1 import utils as api_utils
from fm.common import constants
from fm.common import utils as cutils
from fm.common.i18n import _
LOG = log.getLogger(__name__)
class EventSuppressionPatchType(types.JsonPatchType):

    @staticmethod
    def mandatory_attrs():
        # The uuid path must always be present in a patch request.
        return ['/uuid']
class EventSuppression(base.APIBase):
    """API representation of an event suppression.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of
    an event_suppression.
    """

    id = int
    "Unique ID for this entry"

    uuid = types.uuid
    "Unique UUID for this entry"

    alarm_id = wsme.wsattr(wtypes.text, mandatory=True)
    "Unique id for the Alarm Type"

    description = wsme.wsattr(wtypes.text, mandatory=True)
    "Text description of the Alarm Type"

    suppression_status = wsme.wsattr(wtypes.text, mandatory=True)
    "'suppressed' or 'unsuppressed'"

    links = [link.Link]
    "A list containing a self link and associated links"

    def __init__(self, **kwargs):
        self.fields = objects.event_suppression.fields.keys()
        for k in self.fields:
            # Skip object-layer fields that are not declared on this API type.
            if not hasattr(self, k):
                continue
            setattr(self, k, kwargs.get(k, wtypes.Unset))

    @classmethod
    def convert_with_links(cls, rpc_event_suppression, expand=True):
        """Build an EventSuppression API object with self/bookmark links."""
        parm = EventSuppression(**rpc_event_suppression.as_dict())
        if not expand:
            parm.unset_fields_except(['uuid', 'alarm_id', 'description',
                                      'suppression_status'])
        parm.links = [link.Link.make_link('self', pecan.request.host_url,
                                          'event_suppression', parm.uuid),
                      link.Link.make_link('bookmark',
                                          pecan.request.host_url,
                                          'event_suppression', parm.uuid,
                                          bookmark=True)
                      ]
        return parm
class EventSuppressionCollection(collection.Collection):
    """API representation of a collection of event_suppression."""

    event_suppression = [EventSuppression]
    "A list containing EventSuppression objects"

    def __init__(self, **kwargs):
        self._type = 'event_suppression'

    @classmethod
    def convert_with_links(cls, rpc_event_suppression, limit, url=None,
                           expand=False,
                           **kwargs):
        """Convert DB rows into an API collection with a paging link."""
        coll = EventSuppressionCollection()
        coll.event_suppression = [
            EventSuppression.convert_with_links(entry, expand)
            for entry in rpc_event_suppression]
        coll.next = coll.get_next(limit, url=url, **kwargs)
        return coll
LOCK_NAME = 'EventSuppressionController'
class EventSuppressionController(rest.RestController):
    """REST controller for event_suppression."""

    def __init__(self, parent=None, **kwargs):
        self._parent = parent

    def _get_event_suppression_collection(self, marker=None, limit=None,
                                          sort_key=None, sort_dir=None,
                                          expand=False, resource_url=None,
                                          q=None):
        """Fetch event-suppression rows (filtered via q, or plain listing)."""
        limit = api_utils.validate_limit(limit)
        sort_dir = api_utils.validate_sort_dir(sort_dir)
        kwargs = {}
        if q is not None:
            # Only equality filters are supported here.
            for i in q:
                if i.op == 'eq':
                    kwargs[i.field] = i.value
        marker_obj = None
        if marker:
            marker_obj = objects.event_suppression.get_by_uuid(
                pecan.request.context, marker)
        if q is None:
            parms = pecan.request.dbapi.event_suppression_get_list(
                limit=limit, marker=marker_obj,
                sort_key=sort_key, sort_dir=sort_dir)
        else:
            # NOTE(review): the marker_obj is not passed on this branch, so
            # filtered queries are not paginated -- confirm intended.
            kwargs['limit'] = limit
            kwargs['sort_key'] = sort_key
            kwargs['sort_dir'] = sort_dir
            parms = pecan.request.dbapi.event_suppression_get_all(**kwargs)
        return EventSuppressionCollection.convert_with_links(
            parms, limit, url=resource_url, expand=expand,
            sort_key=sort_key, sort_dir=sort_dir)

    def _get_updates(self, patch):
        """Retrieve the updated attributes from the patch request."""
        updates = {}
        for p in patch:
            # Strip the leading '/' from the JSON-patch path to get the
            # attribute name.
            attribute = p['path'] if p['path'][0] != '/' else p['path'][1:]
            updates[attribute] = p['value']
        return updates

    @staticmethod
    def _check_event_suppression_updates(updates):
        """Check attributes to be updated"""
        # Only suppression_status may change, and only to the two
        # recognized values; anything else is rejected.
        for parameter in updates:
            if parameter == 'suppression_status':
                if not((updates.get(parameter) == constants.FM_SUPPRESSED) or
                       (updates.get(parameter) == constants.FM_UNSUPPRESSED)):
                    msg = _("Invalid event_suppression parameter "
                            "suppression_status values. Valid values are: "
                            "suppressed, unsuppressed")
                    raise wsme.exc.ClientSideError(msg)
            elif parameter == 'alarm_id':
                msg = _("event_suppression parameter alarm_id is not allowed "
                        "to be updated.")
                raise wsme.exc.ClientSideError(msg)
            elif parameter == 'description':
                msg = _("event_suppression parameter description is not "
                        "allowed to be updated.")
                raise wsme.exc.ClientSideError(msg)
            else:
                msg = _("event_suppression invalid parameter.")
                raise wsme.exc.ClientSideError(msg)

    @wsme_pecan.wsexpose(EventSuppressionCollection, [Query],
                         types.uuid, wtypes.text,
                         wtypes.text, wtypes.text, wtypes.text)
    def get_all(self, q=[], marker=None, limit=None,
                sort_key='id', sort_dir='asc'):
        """Retrieve a list of event_suppression."""
        # Always sorted by alarm_id regardless of the requested sort_key.
        sort_key = ['alarm_id']
        return self._get_event_suppression_collection(marker, limit,
                                                      sort_key,
                                                      sort_dir, q=q)

    @wsme_pecan.wsexpose(EventSuppression, types.uuid)
    def get_one(self, uuid):
        """Retrieve information about the given event_suppression."""
        rpc_event_suppression = objects.event_suppression.get_by_uuid(
            pecan.request.context, uuid)
        return EventSuppression.convert_with_links(rpc_event_suppression)

    @cutils.synchronized(LOCK_NAME)
    @wsme.validate(types.uuid, [EventSuppressionPatchType])
    @wsme_pecan.wsexpose(EventSuppression, types.uuid,
                         body=[EventSuppressionPatchType])
    def patch(self, uuid, patch):
        """Updates attributes of event_suppression."""
        event_suppression = objects.event_suppression.get_by_uuid(
            pecan.request.context, uuid)
        event_suppression = event_suppression.as_dict()
        updates = self._get_updates(patch)
        self._check_event_suppression_updates(updates)
        # NOTE(review): the merged dict is never used -- the DB update is
        # driven by `updates` alone; the merge appears to be dead code.
        event_suppression.update(updates)
        updated_event_suppression = \
            pecan.request.dbapi.event_suppression_update(uuid, updates)
        return EventSuppression.convert_with_links(updated_event_suppression)

View File

@ -0,0 +1,58 @@
# Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from wsme import types as wtypes
from fm.api.controllers.v1 import base
def build_url(resource, resource_args, bookmark=False, base_url=None):
    """Compose the URL for *resource*.

    Bookmark links omit the 'v1' version segment; query-style args
    (leading '?') are appended without a separating slash.
    """
    if base_url is None:
        base_url = pecan.request.public_url
    prefix = ('%s/%s' % (base_url, resource) if bookmark
              else '%s/v1/%s' % (base_url, resource))
    # FIXME(lucasagomes): I'm getting a 404 when doing a GET on
    # a nested resource that the URL ends with a '/'.
    # https://groups.google.com/forum/#!topic/pecan-dev/QfSeviLg5qs
    if resource_args.startswith('?'):
        return prefix + resource_args
    return '%s/%s' % (prefix, resource_args)
class Link(base.APIBase):
    """A link representation."""

    href = wtypes.text
    """The url of a link."""

    rel = wtypes.text
    """The name of a link."""

    type = wtypes.text
    """Indicates the type of document/link."""

    @staticmethod
    def make_link(rel_name, url, resource, resource_args,
                  bookmark=False, type=wtypes.Unset):
        """Build a Link whose href is composed via build_url()."""
        href = build_url(resource, resource_args,
                         bookmark=bookmark, base_url=url)
        return Link(href=href, rel=rel_name, type=type)

    @classmethod
    def sample(cls):
        # NOTE(review): the sample href concatenates the port and the uuid
        # with no '/' separator -- looks unintended, confirm before reuse.
        sample = cls(href="http://localhost:18002"
                          "eeaca217-e7d8-47b4-bb41-3f99f20ead81",
                     rel="bookmark")
        return sample

View File

@ -0,0 +1,176 @@
# coding: utf-8
# Copyright © 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright © 2013 eNovance <licensing@enovance.com>
# Copyright Ericsson AB 2013. All rights reserved
#
# Authors: Doug Hellmann <doug.hellmann@dreamhost.com>
# Angus Salkeld <asalkeld@redhat.com>
# Eoghan Glynn <eglynn@redhat.com>
# Julien Danjou <julien@danjou.info>
# Ildiko Vancsa <ildiko.vancsa@ericsson.com>
# Balazs Gibizer <balazs.gibizer@ericsson.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import inspect
import functools
import six
import ast
import wsme
from wsme import types as wtypes
from oslo_utils import strutils
from oslo_utils import timeutils
from oslo_log import log
from fm.common.i18n import _
LOG = log.getLogger(__name__)
operation_kind = wtypes.Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt')
class _Base(wtypes.Base):
    """Common helpers shared by FM API representation types."""

    @classmethod
    def from_db_model(cls, m):
        """Build an API object from a DB model's field dict."""
        return cls(**(m.as_dict()))

    @classmethod
    def from_db_and_links(cls, m, links):
        """Build an API object from a DB model, attaching *links*."""
        return cls(links=links, **(m.as_dict()))

    def as_dict(self, db_model):
        """Render a dict restricted to *db_model*'s constructor args."""
        # NOTE(review): inspect.getargspec is deprecated on py3 and removed
        # in 3.11; presumably kept for py2 compatibility (this tree uses
        # six) — confirm before modernizing.
        valid_keys = inspect.getargspec(db_model.__init__)[0]
        if 'self' in valid_keys:
            valid_keys.remove('self')
        return self.as_dict_from_keys(valid_keys)

    def as_dict_from_keys(self, keys):
        """Render a dict containing only the keys that are set (not Unset)."""
        return dict((k, getattr(self, k))
                    for k in keys
                    if hasattr(self, k) and
                    getattr(self, k) != wsme.Unset)
class Query(_Base):
    """Query filter.

    Represents one field/op/value predicate supplied by an API caller.
    """

    # The data types supported by the query.
    _supported_types = ['integer', 'float', 'string', 'boolean']

    # Functions to convert the data field to the correct type.
    _type_converters = {'integer': int,
                        'float': float,
                        'boolean': functools.partial(
                            strutils.bool_from_string, strict=True),
                        'string': six.text_type,
                        'datetime': timeutils.parse_isotime}

    _op = None  # provide a default

    def get_op(self):
        # Default the comparison operator to equality when unset.
        return self._op or 'eq'

    def set_op(self, value):
        self._op = value

    field = wtypes.text
    "The name of the field to test"

    # op = wsme.wsattr(operation_kind, default='eq')
    # this ^ doesn't seem to work.
    op = wsme.wsproperty(operation_kind, get_op, set_op)
    "The comparison operator. Defaults to 'eq'."

    value = wtypes.text
    "The value to compare against the stored data"

    type = wtypes.text
    "The data type of value to compare against the stored data"

    def __repr__(self):
        # for logging calls
        return '<Query %r %s %r %s>' % (self.field,
                                        self.op,
                                        self.value,
                                        self.type)

    @classmethod
    def sample(cls):
        """Return a representative Query for API samples/docs."""
        return cls(field='resource_id',
                   op='eq',
                   value='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
                   type='string'
                   )

    def as_dict(self):
        return self.as_dict_from_keys(['field', 'op', 'type', 'value'])

    def _get_value_as_type(self, forced_type=None):
        """Convert metadata value to the specified data type.

        This method is called during metadata query to help convert the
        querying metadata to the data type specified by user. If there is no
        data type given, the metadata will be parsed by ast.literal_eval to
        try to do a smart converting.

        NOTE (flwang) Using "_" as prefix to avoid an InvocationError raised
        from wsmeext/sphinxext.py. It's OK to call it outside the Query class.
        Because the "public" side of that class is actually the outside of the
        API, and the "private" side is the API implementation. The method is
        only used in the API implementation, so it's OK.

        :returns: metadata value converted with the specified data type.
        """
        type = forced_type or self.type
        try:
            converted_value = self.value
            if not type:
                # No declared type: best-effort conversion via literal_eval;
                # on failure the raw string is kept.
                try:
                    converted_value = ast.literal_eval(self.value)
                except (ValueError, SyntaxError):
                    msg = _('Failed to convert the metadata value %s'
                            ' automatically') % (self.value)
                    LOG.debug(msg)
            else:
                if type not in self._supported_types:
                    # Types must be explicitly declared so the
                    # correct type converter may be used. Subclasses
                    # of Query may define _supported_types and
                    # _type_converters to define their own types.
                    raise TypeError()
                converted_value = self._type_converters[type](self.value)
        except ValueError:
            msg = _('Failed to convert the value %(value)s'
                    ' to the expected data type %(type)s.') % \
                {'value': self.value, 'type': type}
            raise wsme.exc.ClientSideError(msg)
        except TypeError:
            msg = _('The data type %(type)s is not supported. The supported'
                    ' data type list is: %(supported)s') % \
                {'type': type, 'supported': self._supported_types}
            raise wsme.exc.ClientSideError(msg)
        except Exception:
            msg = _('Unexpected exception converting %(value)s to'
                    ' the expected data type %(type)s.') % \
                {'value': self.value, 'type': type}
            raise wsme.exc.ClientSideError(msg)
        return converted_value

View File

@ -0,0 +1,49 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from oslo_config import cfg
from oslo_log import log
from keystoneauth1 import loading as ks_loading
from cgtsclient.v1 import client as cgts_client
from fm.api import config
CONF = cfg.CONF
LOG = log.getLogger(__name__)
_SESSION = None
def cgtsclient(context, version=1, endpoint=None):
    """Constructs a cgts client object for making API requests.

    :param context: The FM request context for auth.
    :param version: API endpoint version.
    :param endpoint: Optional If the endpoint is not available, it will be
                     retrieved from session
    """
    global _SESSION

    # Lazily create a keystone session from the sysinv config group and
    # cache it at module scope for all subsequent calls.
    if not _SESSION:
        _SESSION = ks_loading.load_session_from_conf_options(
            CONF, config.sysinv_group.name)

    auth_token = context.auth_token
    if endpoint is None:
        # Resolve the endpoint from the service catalog using the
        # "service_type:service_name:interface" triple in catalog_info.
        auth = context.get_auth_plugin()
        service_type, service_name, interface = \
            CONF.sysinv.catalog_info.split(':')
        service_parameters = {'service_type': service_type,
                              'service_name': service_name,
                              'interface': interface,
                              'region_name': CONF.sysinv.os_region_name}
        endpoint = _SESSION.get_endpoint(auth, **service_parameters)

    return cgts_client.Client(version=version,
                              endpoint=endpoint,
                              token=auth_token)

View File

@ -0,0 +1,173 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# coding: utf-8
#
# Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import inspect
import json
import six
import wsme
from wsme import types as wtypes
from oslo_utils import strutils
from oslo_utils import uuidutils
from fm.common.i18n import _
from fm.common import exceptions
class UuidType(wtypes.UserType):
    """A simple UUID type."""

    basetype = wtypes.text
    name = 'uuid'

    @staticmethod
    def validate(value):
        """Raise Invalid (HTTP 400) unless *value* looks like a UUID."""
        if not uuidutils.is_uuid_like(value):
            raise exceptions.Invalid(uuid=value)
        return value

    @staticmethod
    def frombasetype(value):
        """wsme conversion hook; None passes through unvalidated."""
        if value is None:
            return None
        return UuidType.validate(value)
class BooleanType(wtypes.UserType):
    """A simple boolean type."""

    basetype = wtypes.text
    name = 'boolean'

    @staticmethod
    def validate(value):
        """Parse a boolean-ish string strictly ('t', 'true', '1', ...)."""
        try:
            return strutils.bool_from_string(value, strict=True)
        except ValueError as e:
            # raise Invalid to return 400 (BadRequest) in the API
            raise exceptions.Invalid(six.text_type(e))

    @staticmethod
    def frombasetype(value):
        """wsme conversion hook; None passes through unvalidated."""
        if value is None:
            return None
        return BooleanType.validate(value)
class JsonType(wtypes.UserType):
    """A simple JSON type."""

    basetype = wtypes.text
    name = 'json'

    def __str__(self):
        # These are the json serializable native types
        return ' | '.join(map(str, (wtypes.text, six.integer_types, float,
                                    BooleanType, list, dict, None)))

    @staticmethod
    def validate(value):
        """Accept any value that json.dumps can serialize; else Invalid."""
        try:
            json.dumps(value)
        except TypeError:
            raise exceptions.Invalid(_('%s is not JSON serializable') % value)
        else:
            return value

    @staticmethod
    def frombasetype(value):
        return JsonType.validate(value)
jsontype = JsonType()
uuid = UuidType()
class JsonPatchType(wtypes.Base):
    """A complex type that represents a single json-patch operation."""

    # Fix: raw string — '\w' in a plain literal is an invalid escape
    # sequence (DeprecationWarning on python 3.6+, error in the future).
    path = wtypes.wsattr(wtypes.StringType(pattern=r'^(/[\w-]+)+$'),
                         mandatory=True)
    op = wtypes.wsattr(wtypes.Enum(str, 'add', 'replace', 'remove'),
                       mandatory=True)
    value = wsme.wsattr(jsontype, default=wtypes.Unset)

    # The class of the objects being patched. Override this in subclasses.
    _api_base = None

    # Attributes that are not required for construction, but which may not be
    # removed if set. Override in subclasses if needed.
    _extra_non_removable_attrs = set()

    # Set of non-removable attributes, calculated lazily.
    _non_removable_attrs = None

    @staticmethod
    def internal_attrs():
        """Returns a list of internal attributes.

        Internal attributes can't be added, replaced or removed. This
        method may be overwritten by derived class.
        """
        return ['/created_at', '/id', '/links', '/updated_at', '/uuid']

    @classmethod
    def non_removable_attrs(cls):
        """Returns a set of names of attributes that may not be removed.

        Attributes whose 'mandatory' property is True are automatically added
        to this set. To add additional attributes to the set, override the
        field _extra_non_removable_attrs in subclasses, with a set of the form
        {'/foo', '/bar'}.
        """
        if cls._non_removable_attrs is None:
            cls._non_removable_attrs = cls._extra_non_removable_attrs.copy()
            if cls._api_base:
                fields = inspect.getmembers(cls._api_base,
                                            lambda a: not inspect.isroutine(a))
                for name, field in fields:
                    if getattr(field, 'mandatory', False):
                        cls._non_removable_attrs.add('/%s' % name)
        return cls._non_removable_attrs

    @staticmethod
    def validate(patch):
        """Reject patches touching internal or mandatory attributes.

        :returns: dict form of the patch op suitable for jsonpatch.
        """
        # Only the first path segment decides whether it is internal.
        _path = '/' + patch.path.split('/')[1]
        if _path in patch.internal_attrs():
            msg = _("'%s' is an internal attribute and can not be updated")
            raise wsme.exc.ClientSideError(msg % patch.path)

        if patch.path in patch.non_removable_attrs() and patch.op == 'remove':
            msg = _("'%s' is a mandatory attribute and can not be removed")
            raise wsme.exc.ClientSideError(msg % patch.path)

        if patch.op != 'remove':
            if patch.value is wsme.Unset:
                msg = _("'add' and 'replace' operations need a value")
                raise wsme.exc.ClientSideError(msg)

        ret = {'path': patch.path, 'op': patch.op}
        if patch.value is not wsme.Unset:
            ret['value'] = patch.value
        return ret

View File

@ -0,0 +1,152 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import contextlib
import sys
import traceback

import pecan
import six
import wsme
from oslo_config import cfg
from oslo_log import log
from oslo_utils import uuidutils

from fm.api.controllers.v1.sysinv import cgtsclient
from fm.common import exceptions
from fm.common.i18n import _
CONF = cfg.CONF
LOG = log.getLogger(__name__)
ALARM_ENTITY_TYPES_USE_UUID = ['port']
ENTITY_SEP = '.'
KEY_VALUE_SEP = '='
@contextlib.contextmanager
def save_and_reraise_exception():
    """Save current exception, run some code and then re-raise.

    In some cases the exception context can be cleared, resulting in None
    being attempted to be re-raised after an exception handler is run. This
    can happen when eventlet switches greenthreads or when running an
    exception handler, code raises and catches an exception. In both
    cases the exception context will be cleared.

    To work around this, we save the exception state, run handler code, and
    then re-raise the original exception. If another exception occurs, the
    saved exception is logged and the new exception is re-raised.
    """
    type_, value, tb = sys.exc_info()
    try:
        yield
    except Exception:
        LOG.error(_('Original exception being dropped: %s'),
                  traceback.format_exception(type_, value, tb))
        raise
    # Fix: ``raise (type_, value, tb)`` raises the tuple itself on
    # Python 3 (TypeError). six.reraise is the py2/py3-safe way to
    # re-raise the saved exception with its original traceback.
    six.reraise(type_, value, tb)
def validate_limit(limit):
    """Validate and clamp a caller-supplied page limit.

    :param limit: requested maximum item count, or None/0 for the default.
    :returns: limit clamped to CONF.api.limit_max.
    :raises: wsme.exc.ClientSideError when limit is negative.
    """
    if limit and limit < 0:
        raise wsme.exc.ClientSideError(_("Limit must be positive"))
    maximum = CONF.api.limit_max
    # Fix: the old ``min(CONF.api.limit_max, limit) or ...`` raised
    # TypeError on Python 3 when limit is None (int vs None comparison).
    if not limit:
        return maximum
    return min(maximum, limit)
def validate_sort_dir(sort_dir):
    """Return *sort_dir* if it is 'asc' or 'desc'; otherwise raise 400."""
    if sort_dir in ('asc', 'desc'):
        return sort_dir
    raise wsme.exc.ClientSideError(_("Invalid sort direction: %s. "
                                     "Acceptable values are "
                                     "'asc' or 'desc'") % sort_dir)
def _get_port(host_name, port_name):
    """Find a sysinv port by host name and port name; None when absent."""
    for host in cgtsclient(pecan.request.context).ihost.list():
        if host.hostname != host_name:
            continue
        for port in cgtsclient(pecan.request.context).port.list(host.uuid):
            if port.name == port_name:
                return port
    return None
def make_display_id(iid, replace=False):
    """Rewrite an entity instance id for display.

    With replace=True, uuids are mapped back to names; otherwise names
    are mapped to uuids.
    """
    if replace:
        return replace_uuids(iid)
    return replace_name_with_uuid(iid)
def replace_name_with_uuid(instance_id):
    """Replace a 'port=<name>' token in an entity instance id with the
    port's uuid, using the preceding 'host=<name>' token for the lookup.

    Tokens are '.'-separated 'key=value' pairs; the id is returned
    unchanged when it does not parse or no mapping is found.
    """
    hName = None
    port = None
    for keyvalue in instance_id.split(ENTITY_SEP):
        try:
            (key, value) = keyvalue.split(KEY_VALUE_SEP, 1)
        except ValueError:
            # Not key=value shaped: leave the whole id untouched.
            return instance_id

        if key == 'host':
            # Remember the host so a later 'port' token can be resolved.
            hName = value
        elif key == 'port':
            # Only resolve when we know the host and value isn't a uuid yet.
            if hName and not uuidutils.is_uuid_like(value.strip()):
                try:
                    port = _get_port(hName, value)
                except exceptions.NodeNotFound:
                    LOG.error("Can't find the host by name %s", hName)
                    pass
                except exceptions.ServerNotFound:
                    LOG.error("Can't find the port for name %s", value)
                    pass

                if port:
                    new_id = key + KEY_VALUE_SEP + port.uuid
                    instance_id = instance_id.replace(keyvalue, new_id, 1)

    return instance_id
def replace_uuid_with_name(key, value):
    """Map a 'port' uuid token back to 'port=<name>'.

    Returns the rebuilt token, or None when the key is not 'port' or the
    port cannot be found.
    """
    if key != 'port':
        return None
    try:
        port = cgtsclient(pecan.request.context).port.get(value)
    except exceptions.ServerNotFound:
        LOG.error("Can't find the port for uuid %s", value)
        return None
    if port is None:
        return None
    return key + KEY_VALUE_SEP + port.name
def replace_uuids(instance_id):
    """Replace uuid values with names for entity types listed in
    ALARM_ENTITY_TYPES_USE_UUID; other tokens pass through unchanged.
    """
    for keyvalue in instance_id.split(ENTITY_SEP):
        try:
            (key, value) = keyvalue.split(KEY_VALUE_SEP, 1)
        except ValueError:
            # Not key=value shaped: leave the whole id untouched.
            return instance_id
        if key in ALARM_ENTITY_TYPES_USE_UUID:
            if uuidutils.is_uuid_like(value.strip()):
                # May return None when no name mapping exists.
                new_id = replace_uuid_with_name(key, value)
            else:
                # Already a name: keep the token as-is.
                new_id = key + KEY_VALUE_SEP + value

            if new_id is not None:
                instance_id = instance_id.replace(keyvalue, new_id, 1)

    return instance_id

View File

@ -0,0 +1,88 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import webob
from pecan import hooks
from oslo_config import cfg
from oslo_log import log
from oslo_serialization import jsonutils
from fm.common import context
from fm.db import api as dbapi
from fm.common.i18n import _
CONF = cfg.CONF
LOG = log.getLogger(__name__)
class ContextHook(hooks.PecanHook):
    """Configures a request context and attaches it to the request.

    The following HTTP request headers are used:

    X-User-Name:
        Used for context.user_name.

    X-User-Id:
        Used for context.user_id.

    X-Project-Name:
        Used for context.project.

    X-Project-Id:
        Used for context.project_id.

    X-Auth-Token:
        Used for context.auth_token.

    X-Roles:
        Used for context.roles.
    """

    def before(self, state):
        """Build a RequestContext from the headers and attach it to the
        pecan request state before the controller runs."""
        headers = state.request.headers
        environ = state.request.environ
        user_name = headers.get('X-User-Name')
        user_id = headers.get('X-User-Id')
        project = headers.get('X-Project-Name')
        project_id = headers.get('X-Project-Id')
        domain_id = headers.get('X-User-Domain-Id')
        domain_name = headers.get('X-User-Domain-Name')
        auth_token = headers.get('X-Auth-Token')
        roles = headers.get('X-Roles', '').split(',')
        catalog_header = headers.get('X-Service-Catalog')
        service_catalog = None
        if catalog_header:
            try:
                service_catalog = jsonutils.loads(catalog_header)
            except ValueError:
                raise webob.exc.HTTPInternalServerError(
                    _('Invalid service catalog json.'))

        # Populated by the keystone auth_token middleware when the token
        # has been validated.
        auth_token_info = environ.get('keystone.token_info')

        auth_url = CONF.keystone_authtoken.auth_uri

        state.request.context = context.make_context(
            auth_token=auth_token,
            auth_url=auth_url,
            auth_token_info=auth_token_info,
            user_name=user_name,
            user_id=user_id,
            project_name=project,
            project_id=project_id,
            domain_id=domain_id,
            domain_name=domain_name,
            roles=roles,
            service_catalog=service_catalog
        )
class DBHook(hooks.PecanHook):
    """Attach the dbapi object to the request so controllers can get to it."""

    def before(self, state):
        # Fetched per-request; dbapi.get_instance() handles its own caching.
        state.request.dbapi = dbapi.get_instance()

View File

@ -0,0 +1,5 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

View File

@ -0,0 +1,75 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import re
from keystonemiddleware import auth_token
from oslo_log import log
from fm.common import exceptions
from fm.common import utils
from fm.common.i18n import _
LOG = log.getLogger(__name__)
class AuthTokenMiddleware(auth_token.AuthProtocol):
    """A wrapper on Keystone auth_token middleware.

    Does not perform verification of authentication tokens
    for public routes in the API.
    """
    def __init__(self, app, conf, public_api_routes=None):
        if public_api_routes is None:
            public_api_routes = []
        # Fix: raw string — '\.' in a plain literal is an invalid escape
        # sequence (DeprecationWarning on python 3.6+).
        route_pattern_tpl = r'%s(\.json)?$'

        try:
            self.public_api_routes = [re.compile(route_pattern_tpl % route_tpl)
                                      for route_tpl in public_api_routes]
        except re.error as e:
            msg = _('Cannot compile public API routes: %s') % e
            LOG.error(msg)
            raise exceptions.ConfigInvalid(error_msg=msg)

        super(AuthTokenMiddleware, self).__init__(app, conf)

    def __call__(self, env, start_response):
        path = utils.safe_rstrip(env.get('PATH_INFO'), '/')

        # The information whether the API call is being performed against the
        # public API is required for some other components. Saving it to the
        # WSGI environment is reasonable thereby.
        env['is_public_api'] = any(map(lambda pattern: re.match(pattern, path),
                                       self.public_api_routes))

        if env['is_public_api']:
            return self._app(env, start_response)

        return super(AuthTokenMiddleware, self).__call__(env, start_response)

    @classmethod
    def factory(cls, global_config, **local_conf):
        """Paste deploy factory; 'acl_public_routes' is a comma-separated
        list of paths that skip token validation."""
        public_routes = local_conf.get('acl_public_routes', '')
        public_api_routes = [path.strip() for path in public_routes.split(',')]

        def _factory(app):
            return cls(app, global_config, public_api_routes=public_api_routes)
        return _factory

View File

@ -0,0 +1,5 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

View File

@ -0,0 +1,76 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import sys
import eventlet
from oslo_config import cfg
from oslo_log import log as logging
from oslo_service import systemd
from oslo_service import wsgi
import logging as std_logging
from fm.common.i18n import _
from fm.api import app
from fm.api import config
api_opts = [
cfg.StrOpt('bind_host',
default="0.0.0.0",
help=_('IP address for fm api to listen')),
cfg.IntOpt('bind_port',
default=18002,
help=_('listen port for fm api')),
cfg.IntOpt('api_workers', default=2,
help=_("number of api workers")),
cfg.IntOpt('limit_max',
default=2000,
help='the maximum number of items returned in a single '
'response from a collection resource')
]
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
eventlet.monkey_patch(os=False)
def main():
    """Entry point for the fm-api WSGI service."""
    config.init(sys.argv[1:])
    config.setup_logging()
    application = app.load_paste_app()

    CONF.register_opts(api_opts, 'api')

    host = CONF.api.bind_host
    port = CONF.api.bind_port
    workers = CONF.api.api_workers

    if workers < 1:
        # Fix: pass a mapping so %(workers)s resolves; the old code passed
        # the bare int, which breaks the log record formatting.
        LOG.warning("Wrong worker number, worker = %(workers)s",
                    {'workers': workers})
        workers = 1

    LOG.info("Server on http://%(host)s:%(port)s with %(workers)s",
             {'host': host, 'port': port, 'workers': workers})
    systemd.notify_once()
    service = wsgi.Server(CONF, CONF.prog, application, host, port)
    app.serve(service, CONF, workers)

    LOG.debug("Configuration:")
    CONF.log_opt_values(LOG, std_logging.DEBUG)

    app.wait()
if __name__ == '__main__':
main()

View File

@ -0,0 +1,18 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import sys
from oslo_config import cfg
from fm.db import migration
CONF = cfg.CONF
def main():
    """Entry point for fm-dbsync: load config and apply FM DB migrations."""
    cfg.CONF(sys.argv[1:], project='fm')
    migration.db_sync()

View File

@ -0,0 +1,5 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

View File

@ -0,0 +1,19 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import os
import tsconfig.tsconfig as tsc
# Suppression state labels used by the event_suppression API.
FM_SUPPRESSED = 'suppressed'
FM_UNSUPPRESSED = 'unsuppressed'

# NOTE(review): presumably db query selector ids consumed by the FM db
# layer — confirm against fm.db usage before changing.
DB_SUPPRESS_STATUS = 1
DB_MGMT_AFFECTING = 2
DB_DEGRADE_AFFECTING = 3

# Runtime lock directory for FM, rooted in the platform volatile path.
FM_LOCK_PATH = os.path.join(tsc.VOLATILE_PATH, "fm")

View File

@ -0,0 +1,138 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from oslo_context import context
from oslo_config import cfg
from keystoneauth1 import plugin
from keystoneauth1.access import service_catalog as k_service_catalog
from fm.common import policy
CONF = cfg.CONF
class _ContextAuthPlugin(plugin.BaseAuthPlugin):
    """A keystoneauth auth plugin that uses the values from the Context.

    Ideally we would use the plugin provided by auth_token middleware however
    this plugin isn't serialized yet so we construct one from the serialized
    auth data.
    """

    def __init__(self, auth_token, sc):
        """:param auth_token: the raw token string.
        :param sc: the serialized service catalog saved on the context.
        """
        super(_ContextAuthPlugin, self).__init__()

        self.auth_token = auth_token
        self.service_catalog = k_service_catalog.ServiceCatalogV2(sc)

    def get_token(self, *args, **kwargs):
        return self.auth_token

    def get_endpoint(self, session, service_type=None, interface=None,
                     region_name=None, service_name=None, **kwargs):
        """Resolve an endpoint from the stored catalog (session unused)."""
        return self.service_catalog.url_for(service_type=service_type,
                                            service_name=service_name,
                                            interface=interface,
                                            region_name=region_name)
class RequestContext(context.RequestContext):
    """Extends security contexts from the OpenStack common library."""

    def __init__(self, auth_token=None, auth_url=None, domain_id=None,
                 domain_name=None, user_name=None, user_id=None,
                 user_domain_name=None, user_domain_id=None,
                 project_name=None, project_id=None, roles=None,
                 is_admin=None, read_only=False, show_deleted=False,
                 request_id=None, trust_id=None, auth_token_info=None,
                 all_tenants=False, password=None, service_catalog=None,
                 user_auth_plugin=None, **kwargs):
        """Stores several additional request parameters:

        :param domain_id: The ID of the domain.
        :param domain_name: The name of the domain.
        :param user_domain_id: The ID of the domain to
                               authenticate a user against.
        :param user_domain_name: The name of the domain to
                                 authenticate a user against.
        """
        super(RequestContext, self).__init__(auth_token=auth_token,
                                             user=user_name,
                                             tenant=project_name,
                                             is_admin=is_admin,
                                             read_only=read_only,
                                             show_deleted=show_deleted,
                                             request_id=request_id,
                                             roles=roles)

        self.user_name = user_name
        self.user_id = user_id
        self.project_name = project_name
        self.project_id = project_id
        self.domain_id = domain_id
        self.domain_name = domain_name
        self.user_domain_id = user_domain_id
        self.user_domain_name = user_domain_name
        self.auth_url = auth_url
        self.auth_token_info = auth_token_info
        self.trust_id = trust_id
        self.all_tenants = all_tenants
        self.password = password
        if service_catalog:
            # Only include required parts of service_catalog
            self.service_catalog = [s for s in service_catalog
                                    if s.get('type') in
                                    ('platform', )]
        else:
            # if list is empty or none
            self.service_catalog = []

        self.user_auth_plugin = user_auth_plugin
        # Defer the admin decision to policy when not explicitly given.
        if is_admin is None:
            self.is_admin = policy.check_is_admin(self)
        else:
            self.is_admin = is_admin

    def to_dict(self):
        """Serialize the context; superset of the base-class dict."""
        value = super(RequestContext, self).to_dict()
        value.update({'auth_token': self.auth_token,
                      'auth_url': self.auth_url,
                      'domain_id': self.domain_id,
                      'domain_name': self.domain_name,
                      'user_domain_id': self.user_domain_id,
                      'user_domain_name': self.user_domain_name,
                      'user_name': self.user_name,
                      'user_id': self.user_id,
                      'project_name': self.project_name,
                      'project_id': self.project_id,
                      'is_admin': self.is_admin,
                      'read_only': self.read_only,
                      'roles': self.roles,
                      'show_deleted': self.show_deleted,
                      'request_id': self.request_id,
                      'trust_id': self.trust_id,
                      'auth_token_info': self.auth_token_info,
                      'password': self.password,
                      'all_tenants': self.all_tenants,
                      'service_catalog': self.service_catalog})
        return value

    @classmethod
    def from_dict(cls, values):
        """Rebuild a context from to_dict() output."""
        return cls(**values)

    def get_auth_plugin(self):
        # Prefer the plugin provided by auth middleware; otherwise build
        # one from the saved token and service catalog.
        if self.user_auth_plugin:
            return self.user_auth_plugin
        else:
            return _ContextAuthPlugin(self.auth_token, self.service_catalog)
def make_context(*args, **kwargs):
    """Factory helper: build a RequestContext from keyword arguments."""
    return RequestContext(*args, **kwargs)

View File

@ -0,0 +1,109 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import six
import webob.exc
from oslo_utils._i18n import _
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class ApiError(Exception):
    """Base FM API exception.

    Subclasses provide *message* (a %-format template filled from kwargs)
    and *code* (the webob HTTP error class used by the API layer).
    """

    message = _("An unknown exception occurred.")
    code = webob.exc.HTTPInternalServerError

    def __init__(self, message=None, **kwargs):
        self.kwargs = kwargs

        if 'code' not in self.kwargs and hasattr(self, 'code'):
            self.kwargs['code'] = self.code

        if message:
            self.message = message

        try:
            super(ApiError, self).__init__(self.message % kwargs)
            self.message = self.message % kwargs
        except Exception:
            # Bad/missing format kwargs: log the details and re-raise so the
            # programming error is not silently masked.
            LOG.exception('Exception in string format operation, '
                          'kwargs: %s', kwargs)
            raise

    def __str__(self):
        # Fix: previously returned repr(self.value), but no ``value``
        # attribute is ever set, so str() raised AttributeError.
        return self.message

    def __unicode__(self):
        return self.message

    def format_message(self):
        if self.__class__.__name__.endswith('_Remote'):
            return self.args[0]
        else:
            return six.text_type(self)
# Concrete FM exceptions. Each pairs a message template with the HTTP
# status inherited from (or set on) its base class.

class NotFound(ApiError):
    message = _("Resource could not be found.")
    code = webob.exc.HTTPNotFound


class HTTPNotFound(NotFound):
    pass


class AlarmNotFound(NotFound):
    message = _("Alarm %(alarm)s could not be found.")


class EventLogNotFound(NotFound):
    message = _("Event Log %(eventLog)s could not be found.")


class NodeNotFound(NotFound):
    message = _("Node %(node)s could not be found.")


class ServerNotFound(NotFound):
    message = _("Server %(server)s could not be found.")


# 400-family errors.
class Invalid(ApiError):
    message = _("Unacceptable parameters.")
    code = webob.exc.HTTPBadRequest


class PatchError(Invalid):
    message = _("Couldn't apply patch '%(patch)s'. Reason: %(reason)s")


class ConfigInvalid(Invalid):
    message = _("Invalid configuration file. %(error_msg)s")


class InvalidParameterValue(Invalid):
    message = _("%(err)s")


class InvalidIdentity(Invalid):
    message = _("Expected an uuid or int but received %(identity)s.")


class PolicyNotAuthorized(ApiError):
    message = _("Policy doesn't allow %(action)s to be performed.")
    code = webob.exc.HTTPUnauthorized


class Conflict(ApiError):
    message = _('HTTP Conflict.')
    code = webob.exc.HTTPConflict


class AlarmAlreadyExists(Conflict):
    message = _("An Alarm with UUID %(uuid)s already exists.")

View File

@ -0,0 +1,12 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='fm')
# The primary translation function using the well-known name "_"
_ = _translators.primary

View File

@ -0,0 +1,89 @@
# Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
"""Policy Engine For FM."""
from oslo_config import cfg
from oslo_policy import policy
from oslo_log import log
base_rules = [
policy.RuleDefault('admin_required', 'role:admin or is_admin:1',
description='Who is considered an admin'),
policy.RuleDefault('admin_api', 'is_admin_required:True',
description='admin API requirement'),
policy.RuleDefault('default', 'rule:admin_api',
description='default rule'),
]
CONF = cfg.CONF
LOG = log.getLogger(__name__)
_ENFORCER = None
# we can get a policy enforcer by this init.
# oslo policy support change policy rule dynamically.
# at present, policy.enforce will reload the policy rules when it checks
# the policy files have been touched.
def init(policy_file=None, rules=None,
         default_rule=None, use_conf=True, overwrite=True):
    """Init an Enforcer class.

    :param policy_file: Custom policy file to use, if none is
                        specified, ``conf.policy_file`` will be
                        used.
    :param rules: Default dictionary / Rules to use. It will be
                  considered just in the first instantiation. If
                  :meth:`load_rules` with ``force_reload=True``,
                  :meth:`clear` or :meth:`set_rules` with
                  ``overwrite=True`` is called this will be overwritten.
    :param default_rule: Default rule to use, conf.default_rule will
                         be used if none is specified.
    :param use_conf: Whether to load rules from cache or config file.
    :param overwrite: Whether to overwrite existing rules when reload rules
                      from config file.
    """
    global _ENFORCER
    # Singleton: only the first call's arguments take effect.
    if not _ENFORCER:
        # http://docs.openstack.org/developer/oslo.policy/usage.html
        _ENFORCER = policy.Enforcer(CONF,
                                    policy_file=policy_file,
                                    rules=rules,
                                    default_rule=default_rule,
                                    use_conf=use_conf,
                                    overwrite=overwrite)
        _ENFORCER.register_defaults(base_rules)
    return _ENFORCER
def check_is_admin(context):
    """Whether or not role contains 'admin' role according to policy setting.

    Evaluates the 'context_is_admin' rule against the context's credentials.
    """
    init()

    # No specific target resource; only the credentials matter here.
    target = {}
    credentials = context.to_dict()

    return _ENFORCER.enforce('context_is_admin', target, credentials)

View File

@ -0,0 +1,184 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Time related utilities and helper functions.
"""
import calendar
import datetime
import iso8601
# ISO 8601 extended time format with microseconds
_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND
def isotime(at=None, subsecond=False):
    """Stringify time in ISO 8601 format"""
    if not at:
        at = utcnow()
    fmt = (_ISO8601_TIME_FORMAT_SUBSECOND if subsecond
           else _ISO8601_TIME_FORMAT)
    stamp = at.strftime(fmt)
    zone = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
    # UTC is rendered as the compact 'Z' suffix.
    return stamp + ('Z' if zone == 'UTC' else zone)
def parse_isotime(timestr):
    """Parse time from ISO 8601 format.

    :raises ValueError: when *timestr* is malformed or not a string.
    """
    try:
        return iso8601.parse_date(timestr)
    except (iso8601.ParseError, TypeError) as e:
        # Fix: exceptions have no ``.message`` attribute on Python 3, so
        # the old ``ValueError(e.message)`` raised AttributeError instead.
        raise ValueError(str(e))
def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
    """Returns formatted utcnow."""
    moment = at if at else utcnow()
    return moment.strftime(fmt)
def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
    """Turn a formatted time back into a datetime."""
    parsed = datetime.datetime.strptime(timestr, fmt)
    return parsed
def normalize_time(timestamp):
    """Normalize time in arbitrary timezone to UTC naive object"""
    offset = timestamp.utcoffset()
    # Naive datetimes (no tzinfo / no offset) pass through unchanged.
    if offset is not None:
        timestamp = timestamp.replace(tzinfo=None) - offset
    return timestamp
def is_older_than(before, seconds):
    """Return True if before is older than seconds."""
    if isinstance(before, str):
        before = parse_strtime(before).replace(tzinfo=None)
    threshold = datetime.timedelta(seconds=seconds)
    return utcnow() - before > threshold
def is_newer_than(after, seconds):
    """Return True if after is newer than seconds."""
    if isinstance(after, str):
        after = parse_strtime(after).replace(tzinfo=None)
    threshold = datetime.timedelta(seconds=seconds)
    return after - utcnow() > threshold
def utcnow_ts():
    """Timestamp version of our utcnow function."""
    current = utcnow()
    return calendar.timegm(current.timetuple())
def utcnow():
    """Overridable version of utils.utcnow."""
    override = utcnow.override_time
    if not override:
        return datetime.datetime.utcnow()
    try:
        # A list override yields its entries one call at a time.
        return override.pop(0)
    except AttributeError:
        # A single datetime override is returned as-is.
        return override
def iso8601_from_timestamp(timestamp):
    """Returns a iso8601 formated date from timestamp"""
    as_datetime = datetime.datetime.utcfromtimestamp(timestamp)
    return isotime(as_datetime)
# Default: no override is in effect.
utcnow.override_time = None


def set_time_override(override_time=None):
    """
    Override utils.utcnow to return a constant time or a list thereof,
    one at a time.

    :param override_time: a datetime, a list of datetimes, or None to
        freeze time at the moment of this call.

    The previous signature default of ``datetime.datetime.utcnow()`` was
    evaluated once at import time (mutable/eager default pitfall), so every
    later no-argument call froze time at module import rather than "now".
    """
    if override_time is None:
        override_time = datetime.datetime.utcnow()
    utcnow.override_time = override_time
def advance_time_delta(timedelta):
    """Advance overridden time using a datetime.timedelta.

    Works whether the override is a single datetime or a list of datetimes.
    """
    assert(utcnow.override_time is not None)
    try:
        # List override: rebuild the list with every queued time advanced.
        # The previous ``for dt in ...: dt += timedelta`` only rebound the
        # loop variable, leaving the list contents unchanged (no-op bug).
        utcnow.override_time = [t + timedelta for t in utcnow.override_time]
    except TypeError:
        # Single datetime override (not iterable).
        utcnow.override_time += timedelta
def advance_time_seconds(seconds):
    """Advance overridden time by seconds."""
    delta = datetime.timedelta(0, seconds)
    advance_time_delta(delta)
def clear_time_override():
    """Remove the overridden time, restoring real utcnow() behaviour."""
    utcnow.override_time = None
def marshall_now(now=None):
    """Make an rpc-safe datetime with microseconds.
    Note: tzinfo is stripped, but not required for relative times."""
    now = now or utcnow()
    return {'day': now.day, 'month': now.month, 'year': now.year,
            'hour': now.hour, 'minute': now.minute, 'second': now.second,
            'microsecond': now.microsecond}
def unmarshall_time(tyme):
    """Unmarshall a datetime dict."""
    # Positional order of datetime(): year, month, day, hour, minute,
    # second, microsecond.
    return datetime.datetime(tyme['year'], tyme['month'], tyme['day'],
                             tyme['hour'], tyme['minute'], tyme['second'],
                             tyme['microsecond'])
def delta_seconds(before, after):
    """
    Compute the difference in seconds between two date, time, or
    datetime objects (as a float, to microsecond resolution).
    """
    delta = after - before
    if hasattr(delta, 'total_seconds'):
        return delta.total_seconds()
    # Fallback for Python < 2.7 timedelta without total_seconds().
    day_seconds = delta.days * 24 * 3600
    return day_seconds + delta.seconds + float(delta.microseconds) / (10 ** 6)
def is_soon(dt, window):
    """
    Determines if time is going to happen in the next window seconds.
    :params dt: the time
    :params window: minimum seconds to remain to consider the time not soon
    :return: True if expiration is within the given duration
    """
    deadline = utcnow() + datetime.timedelta(seconds=window)
    return normalize_time(dt) <= deadline

View File

@ -0,0 +1,62 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2013-2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import six
import uuid
from oslo_log import log
from oslo_concurrency import lockutils
from fm.common import constants
LOG = log.getLogger(__name__)
def generate_uuid():
    """Return a new random (version 4) UUID as a string."""
    return str(uuid.uuid4())
def synchronized(name, external=True):
    """Return an oslo lockutils decorator for inter-/intra-process locking.

    :param name: lock name
    :param external: when True, use a file lock under FM_LOCK_PATH so the
                     lock is shared across processes.
    """
    lock_path = constants.FM_LOCK_PATH if external else None
    return lockutils.synchronized(name,
                                  lock_file_prefix='fm-',
                                  external=external,
                                  lock_path=lock_path)
def safe_rstrip(value, chars=None):
    """Removes trailing characters from a string if that does not make it empty

    :param value: A string value that will be stripped.
    :param chars: Characters to remove.
    :return: Stripped value.
    """
    if isinstance(value, six.string_types):
        # Fall back to the original value if stripping would empty it.
        return value.rstrip(chars) or value
    LOG.warning("Failed to remove trailing character. "
                "Returning original object. "
                "Supplied object is not a string: %s,", value)
    return value

View File

@ -0,0 +1,11 @@
[DEFAULT]
output_file = fm.conf.sample
wrap_width = 79
namespace = fm.api.conf
namespace = keystonemiddleware.auth_token
namespace = oslo.middleware
namespace = oslo.log
namespace = oslo.policy
namespace = oslo.db

View File

@ -0,0 +1,5 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

152
fm-rest-api/fm/fm/db/api.py Normal file
View File

@ -0,0 +1,152 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
"""
Base classes for storage engines
"""
import abc
from oslo_config import cfg
from oslo_db import api as db_api
_BACKEND_MAPPING = {'sqlalchemy': 'fm.db.sqlalchemy.api'}
IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=_BACKEND_MAPPING,
lazy=True)
def get_instance():
    """Return a DB API instance.

    IMPL is created lazily from ``CONF.database.backend`` via
    ``_BACKEND_MAPPING``, so importing this module does not connect.
    """
    return IMPL
class Connection(object):
    """Base class for storage system connections."""

    # NOTE(review): ``__metaclass__`` is Python 2 syntax; under Python 3 it
    # is ignored, so the @abc.abstractmethod markers are not enforced there.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def __init__(self):
        """Constructor."""

    @abc.abstractmethod
    def alarm_create(self, values):
        """Create a new alarm.

        :param values: A dict containing several items used to identify
                       and track the alarm.
        :returns: An alarm.
        """

    @abc.abstractmethod
    def alarm_get(self, uuid):
        """Return an alarm.

        :param uuid: The uuid of an alarm.
        :returns: An alarm.
        """

    @abc.abstractmethod
    def alarm_get_by_ids(self, alarm_id, entity_instance_id):
        """Return an alarm.

        :param alarm_id: The alarm_id of an alarm.
        :param entity_instance_id: The entity_instance_id of an alarm.
        :returns: An alarm.
        """

    @abc.abstractmethod
    def alarm_get_all(self, uuid=None, alarm_id=None, entity_type_id=None,
                      entity_instance_id=None, severity=None, alarm_type=None):
        """Return a list of alarms for the given filters.

        :param uuid: The uuid of an alarm.
        :param alarm_id: The alarm_id of an alarm.
        :param entity_type_id: The entity_type_id of an alarm.
        :param entity_instance_id: The entity_instance_id of an alarm.
        :param severity: The severity of an alarm.
        :param alarm_type: The alarm_type of an alarm.
        :returns: alarms.
        """

    @abc.abstractmethod
    def alarm_get_list(self, limit=None, marker=None,
                       sort_key=None, sort_dir=None):
        """Return a list of alarms.

        :param limit: Maximum number of alarm to return.
        :param marker: the last item of the previous page; we return the next
                       result set.
        :param sort_key: Attribute by which results should be sorted.
        :param sort_dir: direction in which results should be sorted.
                         (asc, desc)
        """

    @abc.abstractmethod
    def alarm_update(self, id, values):
        """Update properties of an alarm.

        :param id: The id or uuid of an alarm.
        :param values: Dict of values to update.
        :returns: An alarm.
        """

    @abc.abstractmethod
    def alarm_destroy(self, id):
        """Destroy an alarm.

        :param id: The id or uuid of an alarm.
        """

    @abc.abstractmethod
    def alarm_destroy_by_ids(self, alarm_id, entity_instance_id):
        """Destroy an alarm.

        :param alarm_id: The alarm_id of an alarm.
        :param entity_instance_id: The entity_instance_id of an alarm.
        """

    @abc.abstractmethod
    def event_log_get(self, uuid):
        """Return an event_log.

        :param uuid: The uuid of an event_log.
        :returns: An event_log.
        """

    @abc.abstractmethod
    def event_log_get_all(self, uuid=None, event_log_id=None,
                          entity_type_id=None, entity_instance_id=None,
                          severity=None, event_log_type=None, start=None,
                          end=None, limit=None):
        """Return a list of event_log for the given filters.

        :param uuid: The uuid of an event_log.
        :param event_log_id: The id of an event_log.
        :param entity_type_id: The entity_type_id of an event_log.
        :param entity_instance_id: The entity_instance_id of an event_log.
        :param severity: The severity of an event_log.
        :param event_log_type: The event_log_type of an event_log.
        :param start: The event_logs that occurred after start
        :param end: The event_logs that occurred before end
        :returns: event_log.
        """

    @abc.abstractmethod
    def event_log_get_list(self, limit=None, marker=None,
                           sort_key=None, sort_dir=None, evtType="ALL"):
        """Return a list of event_log.

        :param limit: Maximum number of event_log to return.
        :param marker: the last item of the previous page; we return the next
                       result set.
        :param sort_key: Attribute by which results should be sorted.
        :param sort_dir: direction in which results should be sorted.
                         (asc, desc)
        """

View File

@ -0,0 +1,57 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
# under the License.
"""Database setup and migration commands."""
import os
from oslo_config import cfg
from oslo_db import options
from stevedore import driver
from fm.db.sqlalchemy import api as db_api
options.set_defaults(cfg.CONF)
_IMPL = None
MIGRATE_REPO_PATH = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
'sqlalchemy',
'migrate_repo',
)
def get_backend():
    """Return (and cache) the migration backend driver.

    Loaded through the ``fm.database.migration_backend`` stevedore entry
    point named by ``CONF.database.backend``.
    """
    global _IMPL
    if not _IMPL:
        _IMPL = driver.DriverManager("fm.database.migration_backend",
                                     cfg.CONF.database.backend).driver
    return _IMPL
def db_sync(version=None, engine=None):
    """Migrate the database to `version` or the most recent version.

    :param version: target schema version; None means latest.
    :param engine: optional SQLAlchemy engine; defaults to the module engine.
    """
    if engine is None:
        engine = db_api.get_engine()
    return get_backend().db_sync(engine=engine,
                                 abs_path=MIGRATE_REPO_PATH,
                                 version=version
                                 )
def upgrade(version=None):
    """Migrate the database to `version` or the most recent version."""
    return get_backend().upgrade(version)
def version():
    """Return the current schema version from the migration backend."""
    return get_backend().version()
def create_schema():
    """Create the database schema from scratch via the backend."""
    return get_backend().create_schema()

View File

@ -0,0 +1,5 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

View File

@ -0,0 +1,445 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
"""SQLAlchemy storage backend."""
import threading
from oslo_log import log
from oslo_config import cfg
from oslo_utils import uuidutils
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import enginefacade
from oslo_db.sqlalchemy import utils as db_utils
from oslo_db.sqlalchemy import session as db_session
from sqlalchemy import asc, desc, or_
from sqlalchemy.orm.exc import NoResultFound
from fm.common import constants
from fm.common import exceptions
from fm.common import utils
from fm.db import api
from fm.db.sqlalchemy import models
from fm import objects
CONF = cfg.CONF
LOG = log.getLogger(__name__)
_LOCK = threading.Lock()
_FACADE = None
context_manager = enginefacade.transaction_context()
context_manager.configure()
def _create_facade_lazily():
    """Create the global EngineFacade on first use (thread-safe).

    The lock serializes concurrent first calls so only one facade is built;
    subsequent calls return the cached instance.
    """
    global _LOCK
    with _LOCK:
        global _FACADE
        if _FACADE is None:
            _FACADE = db_session.EngineFacade(
                CONF.database.connection,
                **dict(CONF.database)
            )
        return _FACADE
def get_engine():
    """Return the SQLAlchemy engine from the lazily-created facade."""
    return _create_facade_lazily().get_engine()
def get_session(**kwargs):
    """Return a new session from the lazily-created facade."""
    return _create_facade_lazily().get_session(**kwargs)
def get_backend():
    """The backend is this module itself."""
    return Connection()
def _session_for_read():
    """Return an enginefacade reader context manager.

    NOTE(review): a brand-new ``threading.local()`` is created per call, so
    no transaction context is ever shared between calls — confirm this is
    the intended enginefacade usage.
    """
    _context = threading.local()
    return enginefacade.reader.using(_context)
def _session_for_write():
    """Return an enginefacade writer context manager.

    NOTE(review): like _session_for_read, the per-call ``threading.local()``
    means each write runs in its own fresh context — confirm intended.
    """
    _context = threading.local()
    LOG.debug("_session_for_write CONTEXT=%s" % _context)
    return enginefacade.writer.using(_context)
def _paginate_query(model, limit=None, marker=None, sort_key=None,
                    sort_dir=None, query=None):
    """Apply sorting/pagination to a query and return all matching rows."""
    if not query:
        query = model_query(model)
    # Normalize sort_key into a list of key names.
    if not sort_key:
        sort_keys = []
    elif isinstance(sort_key, list):
        sort_keys = sort_key
    else:
        sort_keys = [sort_key]
    # 'id' guarantees a total ordering so marker pagination is stable.
    if 'id' not in sort_keys:
        sort_keys.append('id')
    paginated = db_utils.paginate_query(query, model, limit, sort_keys,
                                        marker=marker, sort_dir=sort_dir)
    return paginated.all()
def model_query(model, *args, **kwargs):
    """Query helper for simpler session usage.

    :param session: if present, the session to use

    NOTE(review): ``**kwargs`` (e.g. ``session=``, ``read_deleted=`` passed
    by callers in this module) is accepted but silently ignored, and the
    query object is returned after the reader context has already exited —
    confirm both are intended.
    """
    with _session_for_read() as session:
        query = session.query(model, *args)
    return query
def add_event_log_filter_by_event_suppression(query, include_suppress):
    """Join event_suppression onto an event_log query and filter by it.

    :param query: Initial query to add filter to.
    :param include_suppress: when True, suppressed events are kept.
    :return: Modified query.
    """
    query = query.outerjoin(
        models.EventSuppression,
        models.EventLog.event_log_id == models.EventSuppression.alarm_id)
    query = query.add_columns(models.EventSuppression.suppression_status)
    if not include_suppress:
        # Keep plain logs plus events whose alarm id is not suppressed.
        query = query.filter(
            or_(models.EventLog.state == 'log',
                models.EventSuppression.suppression_status ==
                constants.FM_UNSUPPRESSED))
    return query
def add_alarm_filter_by_event_suppression(query, include_suppress):
    """Join event_suppression onto an alarm query and filter by it.

    :param query: Initial query to add filter to.
    :param include_suppress: when True, suppressed alarms are kept.
    :return: Modified query.
    """
    query = query.join(
        models.EventSuppression,
        models.Alarm.alarm_id == models.EventSuppression.alarm_id)
    query = query.add_columns(models.EventSuppression.suppression_status)
    if not include_suppress:
        query = query.filter(models.EventSuppression.suppression_status ==
                             constants.FM_UNSUPPRESSED)
    return query
def add_alarm_mgmt_affecting_by_event_suppression(query):
    """Adds a mgmt_affecting attribute from event_suppression to query.

    :param query: Initial query.
    :return: Modified query.
    """
    return query.add_columns(models.EventSuppression.mgmt_affecting)
def add_alarm_degrade_affecting_by_event_suppression(query):
    """Adds a degrade_affecting attribute from event_suppression to query.

    :param query: Initial query.
    :return: Modified query.
    """
    return query.add_columns(models.EventSuppression.degrade_affecting)
class Connection(api.Connection):
    """SqlAlchemy connection."""

    def __init__(self):
        pass

    def get_session(self, autocommit=True):
        # Pass-through to the module-level session factory.
        return get_session(autocommit)

    def alarm_create(self, values):
        """Insert a new alarm row; generates a uuid when absent."""
        if not values.get('uuid'):
            values['uuid'] = utils.generate_uuid()
        alarm = models.Alarm()
        alarm.update(values)
        with _session_for_write() as session:
            try:
                session.add(alarm)
                session.flush()
            except db_exc.DBDuplicateEntry:
                raise exceptions.AlarmAlreadyExists(uuid=values['uuid'])
            return alarm

    @objects.objectify(objects.alarm)
    def alarm_get(self, uuid):
        """Fetch one alarm by uuid (suppressed alarms included)."""
        query = model_query(models.Alarm)
        if uuid:
            query = query.filter_by(uuid=uuid)
        query = add_alarm_filter_by_event_suppression(query, include_suppress=True)
        query = add_alarm_mgmt_affecting_by_event_suppression(query)
        query = add_alarm_degrade_affecting_by_event_suppression(query)
        try:
            result = query.one()
        except NoResultFound:
            raise exceptions.AlarmNotFound(alarm=uuid)
        return result

    def alarm_get_by_ids(self, alarm_id, entity_instance_id):
        """Fetch one alarm by (alarm_id, entity_instance_id); None if absent."""
        query = model_query(models.Alarm)
        if alarm_id and entity_instance_id:
            query = query.filter_by(alarm_id=alarm_id)
            query = query.filter_by(entity_instance_id=entity_instance_id)
            query = query.join(models.EventSuppression,
                               models.Alarm.alarm_id ==
                               models.EventSuppression.alarm_id)
            query = add_alarm_mgmt_affecting_by_event_suppression(query)
            query = add_alarm_degrade_affecting_by_event_suppression(query)
        try:
            result = query.one()
        except NoResultFound:
            return None
        return result

    def alarm_get_all(self, uuid=None, alarm_id=None, entity_type_id=None,
                      entity_instance_id=None, severity=None, alarm_type=None,
                      limit=None, include_suppress=False):
        """Return alarms matching the given substring filters.

        Filters use ``contains`` (substring match), not equality.
        """
        query = model_query(models.Alarm, read_deleted="no")
        query = query.order_by(asc(models.Alarm.severity),
                               asc(models.Alarm.entity_instance_id),
                               asc(models.Alarm.id))
        if uuid is not None:
            query = query.filter(models.Alarm.uuid.contains(uuid))
        if alarm_id is not None:
            query = query.filter(models.Alarm.alarm_id.contains(alarm_id))
        if entity_type_id is not None:
            query = query.filter(models.Alarm.entity_type_id.contains(
                entity_type_id))
        if entity_instance_id is not None:
            query = query.filter(models.Alarm.entity_instance_id.contains(
                entity_instance_id))
        if severity is not None:
            query = query.filter(models.Alarm.severity.contains(severity))
        if alarm_type is not None:
            query = query.filter(models.Alarm.alarm_type.contains(alarm_type))
        query = add_alarm_filter_by_event_suppression(query, include_suppress)
        query = add_alarm_mgmt_affecting_by_event_suppression(query)
        query = add_alarm_degrade_affecting_by_event_suppression(query)
        if limit is not None:
            query = query.limit(limit)
        alarm_list = []
        try:
            alarm_list = query.all()
        except UnicodeDecodeError:
            # Defensive: bad encoding in a row should not break listing.
            LOG.error("UnicodeDecodeError occurred, "
                      "return an empty alarm list.")
        return alarm_list

    @objects.objectify(objects.alarm)
    def alarm_get_list(self, limit=None, marker=None,
                       sort_key=None, sort_dir=None,
                       include_suppress=False):
        """Return a paginated list of alarms."""
        query = model_query(models.Alarm)
        query = add_alarm_filter_by_event_suppression(query, include_suppress)
        query = add_alarm_mgmt_affecting_by_event_suppression(query)
        query = add_alarm_degrade_affecting_by_event_suppression(query)
        return _paginate_query(models.Alarm, limit, marker,
                               sort_key, sort_dir, query)

    def alarm_update(self, id, values):
        """Update an alarm row by primary-key id; raises if not exactly one."""
        with _session_for_write() as session:
            # NOTE(review): model_query() ignores the session kwarg — the
            # update runs on its own reader-created query; confirm intended.
            query = model_query(models.Alarm, session=session)
            query = query.filter_by(id=id)
            count = query.update(values, synchronize_session='fetch')
            if count != 1:
                raise exceptions.AlarmNotFound(alarm=id)
            return query.one()

    def alarm_destroy(self, id):
        """Delete an alarm row by uuid."""
        with _session_for_write() as session:
            query = model_query(models.Alarm, session=session)
            query = query.filter_by(uuid=id)
            try:
                query.one()
            except NoResultFound:
                raise exceptions.AlarmNotFound(alarm=id)
            query.delete()

    def alarm_destroy_by_ids(self, alarm_id, entity_instance_id):
        """Delete an alarm row by (alarm_id, entity_instance_id)."""
        with _session_for_write() as session:
            query = model_query(models.Alarm, session=session)
            if alarm_id and entity_instance_id:
                query = query.filter_by(alarm_id=alarm_id)
                query = query.filter_by(entity_instance_id=entity_instance_id)
            try:
                query.one()
            except NoResultFound:
                raise exceptions.AlarmNotFound(alarm=alarm_id)
            query.delete()

    @objects.objectify(objects.event_log)
    def event_log_get(self, uuid):
        """Fetch one event log entry by uuid (suppressed included)."""
        query = model_query(models.EventLog)
        if uuid:
            query = query.filter_by(uuid=uuid)
        query = add_event_log_filter_by_event_suppression(query,
                                                          include_suppress=True)
        try:
            result = query.one()
        except NoResultFound:
            raise exceptions.EventLogNotFound(eventLog=uuid)
        return result

    def _addEventTypeToQuery(self, query, evtType="ALL"):
        """Restrict query to alarms (set/clear) or logs; unknown -> ALL."""
        if evtType is None or not (evtType in ["ALL", "ALARM", "LOG"]):
            evtType = "ALL"
        if evtType == "ALARM":
            query = query.filter(or_(models.EventLog.state == "set",
                                     models.EventLog.state == "clear"))
        if evtType == "LOG":
            query = query.filter(models.EventLog.state == "log")
        return query

    @objects.objectify(objects.event_log)
    def event_log_get_all(self, uuid=None, event_log_id=None,
                          entity_type_id=None, entity_instance_id=None,
                          severity=None, event_log_type=None, start=None,
                          end=None, limit=None, evtType="ALL",
                          include_suppress=False):
        """Return event log entries matching the given filters,
        newest first.
        """
        query = model_query(models.EventLog, read_deleted="no")
        query = query.order_by(desc(models.EventLog.timestamp))
        if uuid is not None:
            query = query.filter_by(uuid=uuid)
        query = self._addEventTypeToQuery(query, evtType)
        if event_log_id is not None:
            query = query.filter(models.EventLog.event_log_id.contains(
                event_log_id))
        if entity_type_id is not None:
            query = query.filter(models.EventLog.entity_type_id.contains(
                entity_type_id))
        if entity_instance_id is not None:
            query = query.filter(models.EventLog.entity_instance_id.contains(
                entity_instance_id))
        if severity is not None:
            query = query.filter(models.EventLog.severity.contains(severity))
        if event_log_type is not None:
            query = query.filter_by(event_log_type=event_log_type)
        if start is not None:
            query = query.filter(models.EventLog.timestamp >= start)
        if end is not None:
            query = query.filter(models.EventLog.timestamp <= end)
        # NOTE(review): include_suppress defaults to False, never None, so
        # this condition is always true — confirm whether the suppression
        # filter was meant to be optional.
        if include_suppress is not None:
            query = add_event_log_filter_by_event_suppression(query,
                                                              include_suppress)
        if limit is not None:
            query = query.limit(limit)
        hist_list = []
        try:
            hist_list = query.all()
        except UnicodeDecodeError:
            LOG.error("UnicodeDecodeError occurred, "
                      "return an empty event log list.")
        return hist_list

    @objects.objectify(objects.event_log)
    def event_log_get_list(self, limit=None, marker=None,
                           sort_key=None, sort_dir=None, evtType="ALL",
                           include_suppress=False):
        """Return a paginated list of event log entries."""
        query = model_query(models.EventLog)
        query = self._addEventTypeToQuery(query, evtType)
        query = add_event_log_filter_by_event_suppression(query,
                                                          include_suppress)
        return _paginate_query(models.EventLog, limit, marker,
                               sort_key, sort_dir, query)

    @objects.objectify(objects.event_suppression)
    def event_suppression_get(self, id):
        """Fetch one event_suppression row by uuid or integer id."""
        query = model_query(models.EventSuppression)
        if uuidutils.is_uuid_like(id):
            query = query.filter_by(uuid=id)
        else:
            query = query.filter_by(id=id)
        try:
            result = query.one()
        except NoResultFound:
            raise exceptions.InvalidParameterValue(
                err="No event suppression entry found for %s" % id)
        return result

    @objects.objectify(objects.event_suppression)
    def event_suppression_get_all(self, uuid=None, alarm_id=None,
                                  description=None, suppression_status=None,
                                  limit=None, sort_key=None, sort_dir=None):
        """Return event_suppression rows (excluding set_for_deletion)."""
        query = model_query(models.EventSuppression, read_deleted="no")
        if uuid is not None:
            query = query.filter_by(uuid=uuid)
        if alarm_id is not None:
            query = query.filter_by(alarm_id=alarm_id)
        if description is not None:
            query = query.filter_by(description=description)
        if suppression_status is not None:
            query = query.filter_by(suppression_status=suppression_status)
        query = query.filter_by(set_for_deletion=False)
        return _paginate_query(models.EventSuppression, limit, None,
                               sort_key, sort_dir, query)

    @objects.objectify(objects.event_suppression)
    def event_suppression_update(self, uuid, values):
        """Update an event_suppression row by uuid."""
        with _session_for_write() as session:
            query = model_query(models.EventSuppression, session=session)
            query = query.filter_by(uuid=uuid)
            count = query.update(values, synchronize_session='fetch')
            if count != 1:
                # NOTE(review): ``id`` here is the builtin, not the row
                # identifier — presumably ``uuid`` was intended.
                raise exceptions.NotFound(id)
            return query.one()

View File

@ -0,0 +1,5 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

View File

@ -0,0 +1,11 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from migrate.versioning.shell import main
if __name__ == '__main__':
    # Delegate to sqlalchemy-migrate's CLI, rooted at this repository dir.
    main(debug='False', repository='.')

View File

@ -0,0 +1,20 @@
[db_settings]
# Used to identify which repository this database is versioned under.
# You can use the name of your project.
repository_id=fm
# The name of the database table used to track the schema version.
# This name shouldn't already be used by your project.
# If this is changed once a database is under version control, you'll need to
# change the table name in each database too.
version_table=migrate_version
# When committing a change script, Migrate will attempt to generate the
# sql for all supported databases; normally, if one of them fails - probably
# because you don't have that database installed - it is ignored and the
# commit continues, perhaps ending successfully.
# Databases in this list MUST compile successfully during a commit, or the
# entire commit will fail. List the databases your application will actually
# be using to ensure your updates to that database work properly.
# This must be a list; example: ['postgres','sqlite']
required_dbs=[]

View File

@ -0,0 +1,112 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from sqlalchemy import Column, MetaData, String, Table
from sqlalchemy import Boolean, Integer, DateTime
from sqlalchemy.schema import ForeignKeyConstraint
ENGINE = 'InnoDB'
CHARSET = 'utf8'
def upgrade(migrate_engine):
    """Create the initial FM schema: event_suppression, alarm, event_log.

    event_suppression is created first because alarm and event_log both
    carry a (deferred, use_alter) foreign key to its alarm_id column.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    event_suppression = Table(
        'event_suppression',
        meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('deleted_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(36), unique=True, index=True),
        Column('alarm_id', String(15), unique=True, index=True),
        Column('description', String(255)),
        Column('suppression_status', String(15)),
        Column('set_for_deletion', Boolean),
        Column('mgmt_affecting', String(255)),
        Column('degrade_affecting', String(255)),
        mysql_engine=ENGINE,
        mysql_charset=CHARSET,
    )
    event_suppression.create()

    alarm = Table(
        'alarm',
        meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('deleted_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(255), unique=True, index=True),
        Column('alarm_id', String(255), index=True),
        Column('alarm_state', String(255)),
        Column('entity_type_id', String(255), index=True),
        Column('entity_instance_id', String(255), index=True),
        Column('timestamp', DateTime(timezone=False)),
        Column('severity', String(255), index=True),
        Column('reason_text', String(255)),
        Column('alarm_type', String(255), index=True),
        Column('probable_cause', String(255)),
        Column('proposed_repair_action', String(255)),
        Column('service_affecting', Boolean),
        Column('suppression', Boolean),
        Column('inhibit_alarms', Boolean),
        Column('masked', Boolean),
        ForeignKeyConstraint(
            ['alarm_id'],
            ['event_suppression.alarm_id'],
            use_alter=True,
            name='fk_alarm_esuppression_alarm_id'
        ),
        mysql_engine=ENGINE,
        mysql_charset=CHARSET,
    )
    alarm.create()

    event_log = Table(
        'event_log',
        meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('deleted_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(255), unique=True, index=True),
        Column('event_log_id', String(255), index=True),
        Column('state', String(255)),
        Column('entity_type_id', String(255), index=True),
        Column('entity_instance_id', String(255), index=True),
        Column('timestamp', DateTime(timezone=False)),
        Column('severity', String(255), index=True),
        Column('reason_text', String(255)),
        Column('event_log_type', String(255), index=True),
        Column('probable_cause', String(255)),
        Column('proposed_repair_action', String(255)),
        Column('service_affecting', Boolean),
        Column('suppression', Boolean),
        Column('alarm_id', String(255), nullable=True),
        ForeignKeyConstraint(
            ['alarm_id'],
            ['event_suppression.alarm_id'],
            use_alter=True,
            name='fk_elog_alarm_id_esuppression_alarm_id'
        ),
        mysql_engine=ENGINE,
        mysql_charset=CHARSET,
    )
    event_log.create()
def downgrade(migrate_engine):
    # Initial schema version: there is nothing earlier to downgrade to.
    raise NotImplementedError('Downgrade from Initial is unsupported.')

View File

@ -0,0 +1,5 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

View File

@ -0,0 +1,75 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import os
import sqlalchemy
from oslo_db.sqlalchemy import enginefacade
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
from fm.common import exceptions
from fm.db import migration
from fm.common.i18n import _
_REPOSITORY = None
get_engine = enginefacade.get_legacy_facade().get_engine
def db_sync(version=None):
    """Migrate the schema up (or down) to ``version``; latest when None.

    :raises exceptions.ApiError: if ``version`` is not an integer.
    """
    if version is not None:
        try:
            version = int(version)
        except ValueError:
            raise exceptions.ApiError(_("version should be an integer"))

    current_version = db_version()
    repository = _find_migrate_repo()
    if version is None or version > current_version:
        return versioning_api.upgrade(get_engine(), repository, version)
    else:
        return versioning_api.downgrade(get_engine(), repository,
                                        version)
def db_version():
    """Return the current migrate version, version-controlling an empty DB.

    An empty, uncontrolled database is placed under version control at the
    initial version; a non-empty uncontrolled one is rejected.
    """
    repository = _find_migrate_repo()
    try:
        return versioning_api.db_version(get_engine(), repository)
    except versioning_exceptions.DatabaseNotControlledError:
        meta = sqlalchemy.MetaData()
        engine = get_engine()
        meta.reflect(bind=engine)
        tables = meta.tables
        if len(tables) == 0:
            # NOTE(review): migration.INIT_VERSION is not defined in the
            # fm.db.migration module shown in this change — confirm it
            # exists, otherwise this path raises AttributeError.
            db_version_control(migration.INIT_VERSION)
            return versioning_api.db_version(get_engine(), repository)
        else:
            # Some pre-Essex DB's may not be version controlled.
            # Require them to upgrade using Essex first.
            raise exceptions.ApiError(
                _("Upgrade DB using Essex release first."))
def db_version_control(version=None):
    """Place the database under migrate version control at ``version``."""
    repository = _find_migrate_repo()
    versioning_api.version_control(get_engine(), repository, version)
    return version
def _find_migrate_repo():
    """Get the path for the migrate repository."""
    global _REPOSITORY
    path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                        'migrate_repo')
    # NOTE(review): assert is stripped under ``python -O``; an explicit
    # check raising an exception would be more robust.
    assert os.path.exists(path)
    if _REPOSITORY is None:
        _REPOSITORY = Repository(path)
    return _REPOSITORY

View File

@ -0,0 +1,129 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import json
import urlparse
from oslo_config import cfg
from sqlalchemy import Column, ForeignKey, Integer, Boolean
from sqlalchemy import String
from sqlalchemy import DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.types import TypeDecorator, VARCHAR
from oslo_db.sqlalchemy import models
def table_args():
    """Return MySQL table kwargs when the configured DB is MySQL, else None.

    NOTE(review): ``urlparse`` is the Python 2 module name (``urllib.parse``
    in Python 3), and ``cfg.CONF.database_connection`` does not match the
    dotted ``CONF.database.connection`` opt used elsewhere in this change —
    confirm both before this runs on a live config.
    """
    engine_name = urlparse.urlparse(cfg.CONF.database_connection).scheme
    if engine_name == 'mysql':
        return {'mysql_engine': 'InnoDB',
                'mysql_charset': "utf8"}
    return None
class JSONEncodedDict(TypeDecorator):
    """Represents an immutable structure as a json-encoded string."""

    impl = VARCHAR

    def process_bind_param(self, value, dialect):
        # Serialize to JSON on the way into the database; keep NULL as-is.
        return json.dumps(value) if value is not None else value

    def process_result_value(self, value, dialect):
        # Deserialize from JSON on the way out; keep NULL as-is.
        return json.loads(value) if value is not None else value
class FmBase(models.TimestampMixin, models.ModelBase):
    """Common base for FM models: timestamps plus dict rendering."""

    metadata = None

    def as_dict(self):
        """Render this row as a plain dict keyed by column name."""
        return {col.name: self[col.name] for col in self.__table__.columns}


Base = declarative_base(cls=FmBase)
class Alarm(Base):
    """Active-alarm table; alarm_id references event_suppression.alarm_id."""
    __tablename__ = 'alarm'

    id = Column(Integer, primary_key=True, nullable=False)
    uuid = Column(String(255), unique=True, index=True)
    alarm_id = Column('alarm_id', String(255),
                      ForeignKey('event_suppression.alarm_id'),
                      nullable=True, index=True)
    alarm_state = Column(String(255))
    entity_type_id = Column(String(255), index=True)
    entity_instance_id = Column(String(255), index=True)
    timestamp = Column(DateTime(timezone=False))
    severity = Column(String(255), index=True)
    reason_text = Column(String(255))
    alarm_type = Column(String(255), index=True)
    probable_cause = Column(String(255))
    proposed_repair_action = Column(String(255))
    service_affecting = Column(Boolean, default=False)
    suppression = Column(Boolean, default=False)
    inhibit_alarms = Column(Boolean, default=False)
    masked = Column(Boolean, default=False)
class EventLog(Base):
    """Historical event table (alarm set/clear transitions and logs)."""
    __tablename__ = 'event_log'

    id = Column(Integer, primary_key=True, nullable=False)
    uuid = Column(String(255), unique=True, index=True)
    event_log_id = Column('event_log_id', String(255),
                          ForeignKey('event_suppression.alarm_id'),
                          nullable=True, index=True)
    # state distinguishes alarm transitions ('set'/'clear') from 'log'.
    state = Column(String(255))
    entity_type_id = Column(String(255), index=True)
    entity_instance_id = Column(String(255), index=True)
    timestamp = Column(DateTime(timezone=False))
    severity = Column(String(255), index=True)
    reason_text = Column(String(255))
    event_log_type = Column(String(255), index=True)
    probable_cause = Column(String(255))
    proposed_repair_action = Column(String(255))
    service_affecting = Column(Boolean, default=False)
    suppression = Column(Boolean, default=False)
class EventSuppression(Base):
    """Per-alarm-definition suppression configuration and metadata."""

    __tablename__ = 'event_suppression'

    id = Column('id', Integer, primary_key=True, nullable=False)
    uuid = Column('uuid', String(36), unique=True)
    # One row per alarm definition; referenced by the alarm and event_log
    # foreign keys above.
    alarm_id = Column('alarm_id', String(255), unique=True)
    description = Column('description', String(255))
    suppression_status = Column('suppression_status', String(255))
    set_for_deletion = Column('set_for_deletion', Boolean)
    mgmt_affecting = Column('mgmt_affecting', String(255))
    degrade_affecting = Column('degrade_affecting', String(255))

View File

@ -0,0 +1,40 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import functools
from fm.objects import alarm
from fm.objects import event_log
from fm.objects import event_suppression
def objectify(klass):
    """Decorator converting DB-layer results into *klass* objects.

    :param klass: object class exposing a ``from_db_object`` classmethod
    """
    def decorate(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            db_result = func(*args, **kwargs)
            try:
                # Scalar result: wrap it directly.
                return klass.from_db_object(db_result)
            except TypeError:
                # Iterable of rows: wrap each element instead.
                return [klass.from_db_object(row) for row in db_result]
        return wrapped
    return decorate
# Convenience aliases: rebind the imported modules to the object classes
# they define, so callers can write e.g. ``objects.alarm``.
alarm = alarm.Alarm
event_log = event_log.EventLog
event_suppression = event_suppression.EventSuppression

# __all__ must contain attribute *names* (strings); listing the classes
# themselves makes ``from fm.objects import *`` raise TypeError on Python 3.
__all__ = ('alarm',
           'event_log',
           'event_suppression')

View File

@ -0,0 +1,69 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from oslo_versionedobjects import base as object_base
from fm.db import api as db_api
from fm.objects import base
from fm.objects import utils
from fm.common import constants
class Alarm(base.FmObject):
    """Versioned object wrapping a row of the ``alarm`` table."""

    VERSION = '1.0'

    # Handle to the database API backend (shared by all instances).
    dbapi = db_api.get_instance()

    # Field name -> coercion function applied when the attribute is set.
    fields = {
        'id': int,
        'uuid': utils.str_or_none,
        'alarm_id': utils.str_or_none,
        'alarm_state': utils.str_or_none,
        'entity_type_id': utils.str_or_none,
        'entity_instance_id': utils.str_or_none,
        'timestamp': utils.datetime_or_str_or_none,
        'severity': utils.str_or_none,
        'reason_text': utils.str_or_none,
        'alarm_type': utils.str_or_none,
        'probable_cause': utils.str_or_none,
        'proposed_repair_action': utils.str_or_none,
        'service_affecting': utils.str_or_none,
        'suppression': utils.str_or_none,
        'inhibit_alarms': utils.str_or_none,
        'masked': utils.str_or_none,
        'suppression_status': utils.str_or_none,
        'mgmt_affecting': utils.str_or_none,
        'degrade_affecting': utils.str_or_none,
    }

    @staticmethod
    def _from_db_object(server, db_server):
        """Converts a database entity to a formal object."""
        if isinstance(db_server, tuple):
            # Joined queries return a tuple: element 0 is the alarm row and
            # the remaining elements carry event_suppression columns at the
            # positions named by the DB_* constants; fold those extra
            # columns into the object's own fields.
            db_server_fields = db_server[0]
            db_suppress_status = db_server[constants.DB_SUPPRESS_STATUS]
            db_mgmt_affecting = db_server[constants.DB_MGMT_AFFECTING]
            db_degrade_affecting = db_server[constants.DB_DEGRADE_AFFECTING]
            db_server_fields['suppression_status'] = db_suppress_status
            db_server_fields['mgmt_affecting'] = db_mgmt_affecting
            db_server_fields['degrade_affecting'] = db_degrade_affecting
        else:
            db_server_fields = db_server

        for field in server.fields:
            server[field] = db_server_fields[field]

        server.obj_reset_changes()
        return server

    @object_base.remotable_classmethod
    def get_by_uuid(cls, context, uuid):
        """Fetch an Alarm object by its UUID."""
        return cls.dbapi.alarm_get(uuid)

    def save_changes(self, context, updates):
        """Persist *updates* for this alarm back to the database."""
        self.dbapi.alarm_update(self.uuid, updates)

View File

@ -0,0 +1,87 @@
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from oslo_versionedobjects import base as object_base
from oslo_versionedobjects import fields as object_fields
class FmObject(object_base.VersionedObject):
    """Base class and object factory.

    This forms the base of all objects that can be remoted or instantiated
    via RPC. Simply defining a class that inherits from this base class
    will make it remotely instantiatable. Objects should implement the
    necessary "get" classmethod routines as well as "save" object methods
    as appropriate.
    """

    OBJ_SERIAL_NAMESPACE = 'fm_object'
    OBJ_PROJECT_NAMESPACE = 'fm'

    fields = {
        'created_at': object_fields.DateTimeField(nullable=True),
        'updated_at': object_fields.DateTimeField(nullable=True),
    }

    def __getitem__(self, key):
        # Dict-style read access: obj['field'] -> obj.field
        return getattr(self, key)

    def __setitem__(self, key, value):
        # Dict-style write access: obj['field'] = value
        setattr(self, key, value)

    def as_dict(self):
        """Return the currently set fields as a plain {name: value} dict."""
        return {name: getattr(self, name)
                for name in self.fields
                if hasattr(self, name)}

    def obj_refresh(self, loaded_object):
        """Applies updates for objects that inherit from base.FmObject.

        Checks for updated attributes in an object. Updates are applied
        from the loaded object column by column in comparison with the
        current object.
        """
        for name in self.fields:
            if not self.obj_attr_is_set(name):
                continue
            fresh = loaded_object[name]
            if self[name] != fresh:
                self[name] = fresh

    @staticmethod
    def _from_db_object(obj, db_object):
        """Converts a database entity to a formal object.

        :param obj: An object of the class.
        :param db_object: A DB model of the object
        :return: The object of the class with the database entity added
        """
        for name in obj.fields:
            obj[name] = db_object[name]
        obj.obj_reset_changes()
        return obj

    @classmethod
    def from_db_object(cls, db_obj):
        """Build a fresh instance of this class populated from *db_obj*."""
        return cls._from_db_object(cls(), db_obj)
class FmObjectSerializer(object_base.VersionedObjectSerializer):
    """Serializer that (de)hydrates FmObject subclasses for RPC transport."""
    # Base class to use for object hydration
    OBJ_BASE_CLASS = FmObject

View File

@ -0,0 +1,60 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from oslo_log import log
from oslo_versionedobjects import base as object_base
from fm.db import api as db_api
from fm.objects import base
from fm.objects import utils
LOG = log.getLogger('event_log')
class EventLog(base.FmObject):
    """Versioned object wrapping a row of the ``event_log`` table."""

    VERSION = '1.0'

    # Handle to the database API backend (shared by all instances).
    dbapi = db_api.get_instance()

    # Field name -> coercion function applied when the attribute is set.
    fields = {
        'id': int,
        'uuid': utils.str_or_none,
        'event_log_id': utils.str_or_none,
        'state': utils.str_or_none,
        'entity_type_id': utils.str_or_none,
        'entity_instance_id': utils.str_or_none,
        'timestamp': utils.datetime_or_str_or_none,
        'severity': utils.str_or_none,
        'reason_text': utils.str_or_none,
        'event_log_type': utils.str_or_none,
        'probable_cause': utils.str_or_none,
        'proposed_repair_action': utils.str_or_none,
        'service_affecting': utils.str_or_none,
        'suppression': utils.str_or_none,
        'suppression_status': utils.str_or_none,
    }

    @staticmethod
    def _from_db_object(server, db_server):
        """Converts a database entity to a formal object."""
        if isinstance(db_server, tuple):
            # Joined queries return (event_log row, suppression_status);
            # fold the extra column into the object's own fields.
            db_server_fields = db_server[0]
            db_suppress_status = db_server[1]
            db_server_fields['suppression_status'] = db_suppress_status
        else:
            db_server_fields = db_server

        for field in server.fields:
            server[field] = db_server_fields[field]

        server.obj_reset_changes()
        return server

    @object_base.remotable_classmethod
    def get_by_uuid(cls, context, uuid):
        """Fetch an EventLog object by its UUID."""
        return cls.dbapi.event_log_get(uuid)

View File

@ -0,0 +1,30 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from oslo_versionedobjects import base as object_base
from fm.db import api as db_api
from fm.objects import base
from fm.objects import utils
class EventSuppression(base.FmObject):
    """Versioned object wrapping a row of the ``event_suppression`` table."""

    VERSION = '1.0'

    # Handle to the database API backend (shared by all instances).
    dbapi = db_api.get_instance()

    # Field name -> coercion function applied when the attribute is set.
    fields = {
        'id': int,
        'uuid': utils.uuid_or_none,
        'alarm_id': utils.str_or_none,
        'description': utils.str_or_none,
        'suppression_status': utils.str_or_none,
    }

    @object_base.remotable_classmethod
    def get_by_uuid(cls, context, uuid):
        """Fetch an EventSuppression object by its UUID."""
        return cls.dbapi.event_suppression_get(uuid)

View File

@ -0,0 +1,93 @@
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
"""Utility methods for objects"""
import datetime
import iso8601
import uuid
import six
from fm.common.i18n import _
from fm.common import timeutils
def datetime_or_none(dt):
    """Validate that *dt* is a datetime (or None) and return it.

    Naive datetimes are assumed to be UTC and are returned with a UTC
    tzinfo attached; aware datetimes pass through unchanged.

    :raises ValueError: if *dt* is neither None nor a datetime
    """
    if dt is None:
        return None
    if not isinstance(dt, datetime.datetime):
        raise ValueError('A datetime.datetime is required here')
    if dt.utcoffset() is not None:
        return dt
    # NOTE(danms): Legacy objects from sqlalchemy are stored in UTC, but
    # come back without a timezone attached. As a transitional aid,
    # assume a tz-naive object is in UTC.
    return dt.replace(tzinfo=iso8601.iso8601.Utc())
def datetime_or_str_or_none(val):
    """Like datetime_or_none, but also accepts an ISO-8601 string."""
    if isinstance(val, str):
        return timeutils.parse_isotime(val)
    return datetime_or_none(val)
def int_or_none(val):
    """Coerce *val* to an int, passing None through untouched."""
    return None if val is None else int(val)
def str_or_none(val):
    """Coerce *val* to a unicode string, passing None through untouched."""
    return None if val is None else six.text_type(val)
def uuid_or_none(val):
    """Normalize a UUID string (or None).

    :raises ValueError: if *val* is neither None nor a valid UUID string
    """
    if val is None:
        return None
    if isinstance(val, str):
        # Canonicalize via uuid.UUID so equivalent spellings compare equal.
        return str(uuid.UUID(val.strip()))
    raise ValueError(_('Invalid UUID value %s') % val)
def dt_serializer(name):
    """Return a serializer method rendering attribute *name* as ISO time."""
    def serializer(self, name=name):
        # *name* is bound as a default argument to freeze it per-closure.
        if getattr(self, name) is None:
            return None
        return timeutils.isotime(getattr(self, name))
    return serializer
def dt_deserializer(instance, val):
    """Inverse of dt_serializer: parse an ISO time string (or None)."""
    return None if val is None else timeutils.parse_isotime(val)

View File

@ -0,0 +1,147 @@
#! /bin/sh
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
### BEGIN INIT INFO
# Provides: fm-api
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 3 5
# Default-Stop: 0 1 2 6
# Short-Description: Fault Management REST API Service
# Description: Fault Management REST API Service
### END INIT INFO
. /etc/init.d/functions
# Linux Standard Base (LSB) Error Codes
RETVAL=0
GENERIC_ERROR=1
INVALID_ARGS=2
UNSUPPORTED_FEATURE=3
NOT_INSTALLED=5
NOT_RUNNING=7

NAME="fm-api"
DAEMON="/usr/bin/${NAME}"
PIDFILE="/var/run/${NAME}.pid"
CONFIGFILE="/etc/fm/fm.conf"

# Refuse to run at all if the daemon binary is missing or not executable.
if ! [ -x ${DAEMON} ] ; then
    logger "${DAEMON} is missing"
    exit ${NOT_INSTALLED}
fi

# Minimal, predictable PATH for everything this script spawns.
PATH=/sbin:/usr/sbin:/bin:/usr/bin:/usr/local/bin
export PATH
status()
{
    # Status function has a standard set of return codes to indicate daemon status
    # http://refspecs.linuxbase.org/LSB_3.1.0/LSB-Core-generic/LSB-Core-generic/iniscrptact.html
    # Match the interpreter+script command line rather than trusting the
    # pidfile, so a stale pidfile cannot fake a "running" status.
    local my_processes=`pgrep -l -f "^(python|/usr/bin/python|/usr/bin/python2) ${DAEMON}([^\w-]|$)"`

    if [ -z "${my_processes}" ]; then
        echo "$NAME is not running"
        return 1
    fi

    echo "$NAME is running"
    return 0
}
start ()
{
    # Per LSB, starting an already-running service is a success, not an error.
    status >/dev/null
    if [ $? -eq 0 ]; then
        echo "$NAME is already running"
        return 0
    fi

    # Delete stale pidfile, if any
    rm -f $PIDFILE

    # -b daemonizes the process; --make-pidfile records its PID for stop.
    start-stop-daemon --start -b --make-pidfile --pidfile $PIDFILE -x ${DAEMON} -- --config-file=${CONFIGFILE}
    RETVAL=$?
    if [ ${RETVAL} -eq 0 ]; then
        status >/dev/null
        if [ $? -eq 0 ]; then
            logger -t $NAME "start OK"
            echo "OK"
            return 0
        fi
        # Daemon exited right after forking: clean up and fall through to
        # the failure path.
        logger -t $NAME "start-stop-daemon returned 0, but status fails"
        rm -f $PIDFILE
    fi
    logger -t $NAME "start failed"
    return ${GENERIC_ERROR}
}
confirm_stop()
{
    # Find any interpreter processes still running the daemon script.
    local my_processes=`pgrep -l -f "^(python|/usr/bin/python|/usr/bin/python2) ${DAEMON}([^\w-]|$)"`

    # Belt-and-braces: SIGKILL anything that survived the graceful stop.
    if [ -n "${my_processes}" ]
    then
        logger -t $NAME "About to SIGKILL the following: ${my_processes}"
        pkill -KILL -f "^(python|/usr/bin/python|/usr/bin/python2) ${DAEMON}([^\w-]|$)"
    fi
}
stop ()
{
    # Per LSB, stopping an already-stopped service is a success.
    status >/dev/null
    if [ $? -ne 0 ]; then
        echo "$NAME is not running"
        return 0
    fi

    echo -n "Stopping ${NAME}: "

    # Graceful stop via the recorded PID first.
    if [ -f $PIDFILE ]; then
        start-stop-daemon --stop --quiet --retry 3 --oknodo --pidfile $PIDFILE
    fi

    # Force-kill any survivors, then drop the pidfile.
    confirm_stop
    rm -f $PIDFILE

    # Confirm status
    status >/dev/null
    if [ $? -ne 0 ]; then
        echo "Stopped"
        return 0
    else
        echo "Failed"
        return ${GENERIC_ERROR}
    fi
}
# Entry point: dispatch on the LSB action argument and propagate its
# return code as the script's exit status.
rc=0
case "$1" in
    start)
        start
        rc=$?
        ;;
    stop)
        stop
        rc=$?
        ;;
    restart|force-reload|reload)
        # Stop then start; the start result is what gets reported.
        stop
        start
        rc=$?
        ;;
    status)
        status
        rc=$?
        ;;
    *)
        echo "Usage: $0 {start|stop|force-reload|restart|reload|status}"
        exit 1
        ;;
esac

exit $rc

View File

@ -0,0 +1,15 @@
[Unit]
Description=Fault Management REST API Service
After=nfscommon.service sw-patch.service
After=network-online.target systemd-udev-settle.service
[Service]
Type=simple
RemainAfterExit=yes
User=root
ExecStart=/etc/rc.d/init.d/fm-api start
ExecStop=/etc/rc.d/init.d/fm-api stop
PIDFile=/var/run/fm-api.pid
[Install]
WantedBy=multi-user.target

50
fm-rest-api/fm/setup.cfg Normal file
View File

@ -0,0 +1,50 @@
[metadata]
name = fm
version = 1.0.0
summary = CGTS Fault Management API service
classifier =
Environment :: OpenStack
Intended Audience :: Information Technology
Intended Audience :: System Administrators
License :: OSI Approved :: Apache Software License
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.5
[global]
setup-hooks =
pbr.hooks.setup_hook
[files]
packages =
fm
[entry_points]
console_scripts =
fm-api = fm.cmd.api:main
fm-dbsync = fm.cmd.dbsync:main
fm.database.migration_backend =
sqlalchemy = oslo_db.sqlalchemy.migration
[build_sphinx]
all_files = 1
build-dir = doc/build
source-dir = doc/source
warning-is-error = 1
[egg_info]
tag_build =
tag_date = 0
tag_svn_revision = 0
[extract_messages]
keywords = _ gettext ngettext l_ lazy_gettext
mapping_file = babel.cfg
output_file = fm/locale/fm.pot
[wheel]
universal = 1

43
fm-rest-api/fm/setup.py Normal file
View File

@ -0,0 +1,43 @@
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
from setuptools import setup, find_packages
# Packaging for the fm REST API service; metadata mirrors setup.cfg.
setup(
    name='fm',
    description='Titanium Cloud Fault Management',
    version='1.0.0',
    license='windriver',
    platforms=['any'],
    provides=['fm'],
    packages=find_packages(),
    package_data={},
    include_package_data=False,
    entry_points={
        # Pluggable DB migration backend consumed by fm-dbsync.
        'fm.database.migration_backend': [
            'sqlalchemy = oslo_db.sqlalchemy.migration',
        ],
        # Console entry points: the REST API server and the DB sync tool.
        'console_scripts': [
            'fm-dbsync = fm.cmd.dbsync:main',
            'fm-api = fm.cmd.api:main'
        ],
    }
)

13
python-fmclient/PKG-INFO Normal file
View File

@ -0,0 +1,13 @@
Metadata-Version: 1.1
Name: python-fmclient
Version: 1.0
Summary: A python client library for Fault Management
Home-page:
Author: Windriver
Author-email: info@windriver.com
License: windriver
A python client library for Fault Management
Platform: UNKNOWN

View File

@ -0,0 +1,2 @@
SRC_DIR="fmclient"
TIS_PATCH_VER=1

View File

@ -0,0 +1,76 @@
%global pypi_name fmclient
Summary: A python client library for Fault Management
Name: python-fmclient
Version: 1.0
Release: %{tis_patch_ver}%{?_tis_dist}
License: windriver
Group: base
Packager: Wind River <info@windriver.com>
URL: unknown
Source0: %{name}-%{version}.tar.gz
BuildRequires: git
BuildRequires: python-pbr >= 2.0.0
BuildRequires: python-setuptools
Requires: python-keystoneauth1 >= 3.1.0
Requires: python-pbr >= 2.0.0
Requires: python-six >= 1.9.0
Requires: python-oslo-i18n >= 2.1.0
Requires: python-oslo-utils >= 3.20.0
Requires: python-requests
%description
A python client library for Fault Management
%define local_bindir /usr/bin/
%define pythonroot /usr/lib64/python2.7/site-packages
%define debug_package %{nil}
%package sdk
Summary: SDK files for %{name}
%description sdk
Contains SDK files for %{name} package
%prep
%autosetup -n %{name}-%{version} -S git
# Remove bundled egg-info
rm -rf *.egg-info
%build
echo "Start build"
export PBR_VERSION=%{version}
%{__python} setup.py build
%install
echo "Start install"
export PBR_VERSION=%{version}
%{__python} setup.py install --root=%{buildroot} \
--install-lib=%{pythonroot} \
--prefix=/usr \
--install-data=/usr/share \
--single-version-externally-managed
# prep SDK package
mkdir -p %{buildroot}/usr/share/remote-clients
tar zcf %{buildroot}/usr/share/remote-clients/%{name}-%{version}.tgz --exclude='.gitignore' --exclude='.gitreview' -C .. %{name}-%{version}
%clean
echo "CLEAN CALLED"
rm -rf $RPM_BUILD_ROOT
%files
%defattr(-,root,root,-)
%doc LICENSE
%{local_bindir}/*
%{pythonroot}/%{pypi_name}/*
%{pythonroot}/%{pypi_name}-%{version}*.egg-info
%files sdk
/usr/share/remote-clients/%{name}-%{version}.tgz

176
python-fmclient/fmclient/LICENSE Executable file
View File

@ -0,0 +1,176 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

View File

@ -0,0 +1,22 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# Re-export the client factory at package level when the full client is
# importable; otherwise warn so version lookup below still works.
try:
    import fmclient.client
    Client = fmclient.client.get_client
except ImportError:
    import warnings
    warnings.warn("Could not import fmclient.client", ImportWarning)

import pbr.version

# Package version as reported by pbr from the installed metadata.
version_info = pbr.version.VersionInfo('fmclient')
try:
    __version__ = version_info.version_string()
except AttributeError:
    # No metadata available (e.g. running from a bare source tree).
    __version__ = None

View File

@ -0,0 +1,93 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from oslo_utils import importutils
from keystoneauth1 import loading
from fmclient.common.i18n import _
from fmclient import exc
SERVICE_TYPE = 'faultmanagement'
def get_client(version, endpoint=None, session=None, auth_token=None,
               fm_url=None, username=None, password=None, auth_url=None,
               project_id=None, project_name=None,
               region_name=None, timeout=None,
               user_domain_id=None, user_domain_name=None,
               project_domain_id=None, project_domain_name=None,
               service_type=SERVICE_TYPE, endpoint_type=None,
               **ignored_kwargs):
    """Get an authenticated client, based on the credentials.

    Two modes are supported: a direct token+endpoint pair (no Keystone
    round trip), or Keystone credentials via ``auth_url``.

    :raises exc.AuthSystem: if the endpoint cannot be determined from the
        supplied credentials/session.
    """
    kwargs = {}
    interface = endpoint_type or 'publicURL'
    # ``fm_url`` is a legacy alias for ``endpoint``.
    endpoint = endpoint or fm_url

    if auth_token and endpoint:
        # Direct mode: caller supplied both endpoint and token.
        kwargs.update({
            'token': auth_token,
        })
        if timeout:
            kwargs.update({
                'timeout': timeout,
            })
    elif auth_url:
        # Keystone mode: build an auth plugin from the credentials —
        # password auth when username/password are given, token auth
        # otherwise.
        auth_kwargs = {}
        auth_type = 'password'
        auth_kwargs.update({
            'auth_url': auth_url,
            'project_id': project_id,
            'project_name': project_name,
            'user_domain_id': user_domain_id,
            'user_domain_name': user_domain_name,
            'project_domain_id': project_domain_id,
            'project_domain_name': project_domain_name,
        })
        if username and password:
            auth_kwargs.update({
                'username': username,
                'password': password
            })
        elif auth_token:
            auth_type = 'token'
            auth_kwargs.update({
                'token': auth_token,
            })

        # Create new session only if it was not passed in
        if not session:
            loader = loading.get_plugin_loader(auth_type)
            auth_plugin = loader.load_from_options(**auth_kwargs)
            session = loading.session.Session().load_from_options(
                auth=auth_plugin, timeout=timeout)

    exception_msg = _('Must provide Keystone credentials or user-defined '
                      'endpoint and token')
    if not endpoint:
        if session:
            # Resolve the FM endpoint from the Keystone service catalog.
            try:
                endpoint = session.get_endpoint(
                    service_type=service_type,
                    interface=interface,
                    region_name=region_name
                )
            except Exception as e:
                raise exc.AuthSystem(
                    _('%(message)s, error was: %(error)s') %
                    {'message': exception_msg, 'error': e})
        else:
            # Neither session, nor valid auth parameters provided
            raise exc.AuthSystem(exception_msg)

    kwargs['endpoint_override'] = endpoint
    kwargs['service_type'] = service_type
    kwargs['interface'] = interface
    kwargs['version'] = version

    # Lazily import the versioned client module (e.g. fmclient.v1.client).
    fm_module = importutils.import_versioned_module('fmclient',
                                                    version, 'client')
    client_class = getattr(fm_module, 'Client')

    return client_class(endpoint, session=session, **kwargs)

View File

@ -0,0 +1,5 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

View File

@ -0,0 +1,149 @@
# Copyright 2013 Wind River, Inc.
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base utilities to build API operation managers and objects on top of.
"""
import copy
def getid(obj):
    """Accept either a resource object or a bare ID.

    Returns ``obj.id`` when present, otherwise *obj* itself (assumed to
    already be an identifier).
    """
    return getattr(obj, 'id', obj)
class Manager(object):
    """Managers interact with a particular type of API and provide CRUD
    operations for them.

    Subclasses set :attr:`resource_class` and build their public calls on
    the ``_create`` / ``_list`` / ``_update`` / ``_delete`` primitives.
    """

    # Resource subclass used to wrap response bodies (set by subclasses).
    resource_class = None

    def __init__(self, api):
        # ``api`` is the HTTP client adapter; all requests go through it.
        self.api = api

    def _create(self, url, body):
        """POST *body* to *url* and wrap the response in resource_class."""
        # All HTTP verbs are issued via ``self.api``; the original called
        # ``self.post``, which Manager does not define (AttributeError).
        resp, body = self.api.post(url, body=body)
        if body:
            return self.resource_class(self, body)

    def _upload(self, url, body, data=None):
        """POST *body* (with optional extra *data*) and return the raw
        response."""
        resp = self.api.post(url, body=body, data=data)
        return resp

    def _json_get(self, url, body=None):
        """send a GET request and return a json serialized object"""
        resp, body = self.api.get(url, body=body)
        return body

    def _format_body_data(self, body, response_key):
        """Pull the payload out of the response envelope.

        Always returns a list (possibly empty) so callers can iterate
        uniformly; a missing *response_key* yields ``[]``.
        """
        if response_key:
            try:
                data = body[response_key]
            except KeyError:
                return []
        else:
            data = body

        if not isinstance(data, list):
            data = [data]

        return data

    def _list(self, url, response_key=None, obj_class=None, body=None):
        """GET *url* and wrap each payload item in *obj_class*
        (defaults to resource_class)."""
        resp, body = self.api.get(url)

        if obj_class is None:
            obj_class = self.resource_class

        data = self._format_body_data(body, response_key)
        return [obj_class(self, res, loaded=True) for res in data if res]

    def _update(self, url, **kwargs):
        """PATCH *url*; returns a resource only when a body came back."""
        resp, body = self.api.patch(url, **kwargs)
        # PATCH/PUT requests may not return a body
        if body:
            return self.resource_class(self, body)

    def _delete(self, url):
        """DELETE *url*; no response body is expected."""
        self.api.delete(url)
class Resource(object):
    """A resource represents a particular instance of an object (tenant, user,
    etc). This is pretty much just a bag for attributes.

    :param manager: Manager object
    :param info: dictionary representing resource attributes
    :param loaded: prevent lazy-loading if set to True
    """

    def __init__(self, manager, info, loaded=False):
        self.manager = manager
        self._info = info
        self._add_details(info)
        self._loaded = loaded

    def _add_details(self, info):
        # Promote every key of the info dict to an instance attribute.
        # Use items() instead of the Python 2-only iteritems() so the
        # class works under both Python 2 and Python 3.
        for (k, v) in info.items():
            setattr(self, k, v)

    def __getattr__(self, k):
        if k not in self.__dict__:
            # NOTE(bcwaldon): disallow lazy-loading if already loaded once
            if not self.is_loaded():
                # Fetch the full resource once, then retry the lookup.
                self.get()
                return self.__getattr__(k)

            raise AttributeError(k)
        else:
            return self.__dict__[k]

    def __repr__(self):
        # Show public attributes only; 'manager' is noise here.
        reprkeys = sorted(k for k in self.__dict__.keys() if k[0] != '_' and
                          k != 'manager')
        info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys)
        return "<%s %s>" % (self.__class__.__name__, info)

    def get(self):
        """Refresh this resource from the manager, if it supports get()."""
        # set_loaded() first ... so if we have to bail, we know we tried.
        self.set_loaded(True)
        if not hasattr(self.manager, 'get'):
            return

        new = self.manager.get(self.id)
        if new:
            self._add_details(new._info)

    def __eq__(self, other):
        # Two resources of the same class compare by id when both have
        # one, otherwise by their full attribute dicts.
        if not isinstance(other, self.__class__):
            return False
        if hasattr(self, 'id') and hasattr(other, 'id'):
            return self.id == other.id
        return self._info == other._info

    def is_loaded(self):
        return self._loaded

    def set_loaded(self, val):
        self._loaded = val

    def to_dict(self):
        # Deep copy so callers cannot mutate our cached attributes.
        return copy.deepcopy(self._info)

View File

@ -0,0 +1,42 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
"""
The sole purpose of this module is to manage access to the _no_wrap variable
used by the wrapping_formatters module
"""
# Single-element list so the flag can be mutated in place from anywhere
# in this module without rebinding the module-level name.
_no_wrap = [False]


def is_nowrap_set(no_wrap=None):
    """Return True when output wrapping should be disabled.

    An explicit boolean argument takes precedence; any other value
    (including the default None) defers to the module-level flag
    maintained by set_no_wrap().

    :param no_wrap: optional boolean override
    :return: True if no wrapping is desired
    """
    if isinstance(no_wrap, bool):
        return no_wrap
    return _no_wrap[0]


def set_no_wrap(no_wrap):
    """Set the global no-wrap flag, then report the effective setting.

    A None argument leaves the stored flag untouched.

    :param no_wrap: new flag value, or None for "no change"
    :return: result of is_nowrap_set(no_wrap)
    """
    if no_wrap is not None:
        _no_wrap[0] = no_wrap
    return is_nowrap_set(no_wrap)

View File

@ -0,0 +1,170 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import inspect
import sys
import six
from six.moves import http_client
from fmclient.common.i18n import _
class ClientException(Exception):
    """An error occurred."""

    def __init__(self, message=None):
        self.message = message

    def __str__(self):
        # Fall back to the class docstring when no (truthy) message was
        # supplied; subclasses inherit this, so their docstrings double
        # as default error text.
        if self.message:
            return self.message
        return self.__class__.__doc__
# NOTE: the docstring doubles as the default str() of the exception via
# ClientException.__str__, so keep it user-readable.
class InvalidEndpoint(ClientException):
    """The provided endpoint is invalid."""
# Raised for service-catalog related failures (see docstring, which also
# serves as the default str() of the exception).
class EndpointException(ClientException):
    """Something is rotten in Service Catalog."""
# Raised when a request cannot reach the server at all, as opposed to the
# HTTP-status-specific errors further down in this module.
class CommunicationError(ClientException):
    """Unable to communicate with server."""
class Conflict(ClientException):
    """HTTP 409 - Conflict.
    Indicates that the request could not be processed because of conflict
    in the request, such as an edit conflict.
    """
    # Registered in _code_map through the http_status attribute.
    # NOTE(review): from_response() instantiates mapped classes with
    # http_status/response/method/url/request_id keywords, but this class
    # inherits ClientException.__init__, which only accepts `message` --
    # verify that a 409 response does not raise TypeError here (the other
    # mapped classes derive from HttpError instead).
    http_status = http_client.CONFLICT
    message = _("Conflict")
# _code_map contains all the classes that have http_status attribute.
# It is built by scanning this module's namespace at import time; entries
# whose http_status is falsy (e.g. a default of 0) are filtered out.
# NOTE(review): as positioned here, classes defined later in the module
# (HttpError and its subclasses) are not yet in the namespace when this
# comprehension runs -- verify the map's placement relative to them.
_code_map = dict(
    (getattr(obj, 'http_status', None), obj)
    for name, obj in vars(sys.modules[__name__]).items()
    if inspect.isclass(obj) and getattr(obj, 'http_status', False)
)
class HttpError(ClientException):
    """The base exception class for all HTTP exceptions."""
    http_status = 0
    message = _("HTTP Error")

    def __init__(self, message=None, details=None,
                 response=None, request_id=None,
                 url=None, method=None, http_status=None):
        # Explicit arguments win over the class-level defaults.
        self.http_status = http_status or self.http_status
        self.message = message or self.message
        self.details = details
        self.request_id = request_id
        self.response = response
        self.url = url
        self.method = method
        parts = ["%s (HTTP %s)" % (self.message, self.http_status)]
        if request_id:
            parts.append("(Request-ID: %s)" % request_id)
        # ClientException.__init__ stores this formatted string back into
        # self.message, so str(exc) includes the status code as well.
        super(HttpError, self).__init__(" ".join(parts))
# Base for 3xx responses; no http_status is set here, so it is not picked
# up by the _code_map scan (which skips falsy http_status values).
class HTTPRedirection(HttpError):
    """HTTP Redirection."""
    message = _("HTTP Redirection")
# Fallback class used by from_response() for 4xx status codes that have
# no dedicated entry in _code_map.
class HTTPClientError(HttpError):
    """Client-side HTTP error.
    Exception for cases in which the client seems to have erred.
    """
    message = _("HTTP Client Error")
# Fallback class used by from_response() for 5xx status codes that have
# no dedicated entry in _code_map.
class HttpServerError(HttpError):
    """Server-side HTTP error.
    Exception for cases in which the server is aware that it has
    erred or is incapable of performing the request.
    """
    message = _("HTTP Server Error")
class ServiceUnavailable(HttpServerError):
    """HTTP 503 - Service Unavailable.
    The server is currently unavailable.
    """
    # http_status makes this class eligible for the _code_map scan.
    http_status = http_client.SERVICE_UNAVAILABLE
    message = _("Service Unavailable")
class GatewayTimeout(HttpServerError):
    """HTTP 504 - Gateway Timeout.
    The server was acting as a gateway or proxy and did not receive a timely
    response from the upstream server.
    """
    http_status = http_client.GATEWAY_TIMEOUT
    # Wrapped in _() for i18n, matching every other message string in
    # this module (previously a bare literal).
    message = _("Gateway Timeout")
class HttpVersionNotSupported(HttpServerError):
    """HTTP 505 - HttpVersion Not Supported.
    The server does not support the HTTP protocol version used in the request.
    """
    http_status = http_client.HTTP_VERSION_NOT_SUPPORTED
    # Wrapped in _() for i18n, matching every other message string in
    # this module (previously a bare literal).
    message = _("HTTP Version Not Supported")
def from_response(response, method, url=None):
    """Returns an instance of :class:`HttpError` or subclass based on response.

    The concrete class is looked up in ``_code_map`` by status code,
    falling back to HttpServerError / HTTPClientError / HttpError by
    status range.

    :param response: instance of `requests.Response` class
    :param method: HTTP method used for request
    :param url: URL used for request
    """
    req_id = response.headers.get("x-openstack-request-id")
    kwargs = {
        "http_status": response.status_code,
        "response": response,
        "method": method,
        "url": url,
        "request_id": req_id,
    }
    # NOTE: the "retry-after" header is deliberately NOT forwarded:
    # HttpError.__init__ has a fixed signature with no retry_after
    # keyword (and no **kwargs), so including it in kwargs made
    # cls(**kwargs) raise TypeError whenever the header was present.
    content_type = response.headers.get("Content-Type", "")
    if content_type.startswith("application/json"):
        try:
            body = response.json()
        except ValueError:
            # Malformed JSON: fall through with no message/details.
            pass
        else:
            # Guard the non-empty case: list(body)[0] on an empty dict
            # would raise IndexError.
            if isinstance(body, dict) and body:
                # The error payload lives under the body's first key.
                error = body.get(list(body)[0])
                if isinstance(error, dict):
                    kwargs["message"] = (error.get("message") or
                                         error.get("faultstring"))
                    kwargs["details"] = (error.get("details") or
                                         six.text_type(body))
    elif content_type.startswith("text/"):
        kwargs["details"] = getattr(response, 'text', '')

    try:
        cls = _code_map[response.status_code]
    except KeyError:
        # No exact mapping: pick the broadest class for the status range.
        if 500 <= response.status_code < 600:
            cls = HttpServerError
        elif 400 <= response.status_code < 500:
            cls = HTTPClientError
        else:
            cls = HttpError
    return cls(**kwargs)

Some files were not shown because too many files have changed in this diff Show More