diff --git a/centos_pkg_dirs b/centos_pkg_dirs index b79716c6..6335d5e7 100644 --- a/centos_pkg_dirs +++ b/centos_pkg_dirs @@ -2,5 +2,7 @@ fm-api fm-common fm-mgr fm-doc +fm-rest-api +python-fmclient snmp-ext snmp-audittrail diff --git a/fm-api/fm_api/fm_api.py b/fm-api/fm_api/fm_api.py index de61985a..c5bddb08 100755 --- a/fm-api/fm_api/fm_api.py +++ b/fm-api/fm_api/fm_api.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2013-2014 Wind River Systems, Inc. +# Copyright (c) 2013-2018 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -11,9 +11,9 @@ # import copy -import subprocess from . import constants import six +import fm_core class ClientException(Exception): @@ -31,8 +31,8 @@ class ClientException(Exception): # on the alarm. Optional. # alarm_type: see ALARM_TYPE # probable_cause: see ALARM_PROBABLE_CAUSE -# proposed_repair_action:free-format string providing additional details on how to -# clear the alarm. Optional. +# proposed_repair_action:free-format string providing additional details on +# how to clear the alarm. Optional. 
# service_affecting: true/false, default to false # suppression: true/false (allowed/not-allowed), default to false # uuid: unique identifier of an active alarm instance, filled by FM system @@ -76,67 +76,59 @@ class FaultAPIs(object): self._check_required_attributes(data) self._validate_attributes(data) buff = self._alarm_to_str(data) - cmd = constants.FM_CLIENT_SET_FAULT + '"' + buff + '"' - resp = self._run_cmd_and_get_resp(cmd) - if (resp[0] == "Ok") and (len(resp) > 1): - return resp[1] - else: + try: + return fm_core.set(buff) + except (RuntimeError, SystemError, TypeError): return None def clear_fault(self, alarm_id, entity_instance_id): sep = constants.FM_CLIENT_STR_SEP buff = (sep + self._check_val(alarm_id) + sep + self._check_val(entity_instance_id) + sep) - cmd = constants.FM_CLIENT_CLEAR_FAULT + '"' + buff + '"' - - resp = self._run_cmd_and_get_resp(cmd) - if resp[0] == "Ok": - return True - else: + try: + return fm_core.clear(buff) + except (RuntimeError, SystemError, TypeError): return False def get_fault(self, alarm_id, entity_instance_id): sep = constants.FM_CLIENT_STR_SEP buff = (sep + self._check_val(alarm_id) + sep + self._check_val(entity_instance_id) + sep) - cmd = constants.FM_CLIENT_GET_FAULT + '"' + buff + '"' - resp = self._run_cmd_and_get_resp(cmd) - if (resp[0] == "Ok") and (len(resp) > 1): - return self._str_to_alarm(resp[1]) - else: + try: + resp = fm_core.get(buff) + return self._str_to_alarm(resp) if resp else None + except (RuntimeError, SystemError, TypeError): return None def clear_all(self, entity_instance_id): - cmd = constants.FM_CLIENT_CLEAR_ALL + '"' + entity_instance_id + '"' - resp = self._run_cmd_and_get_resp(cmd) - if resp[0] == "Ok": - return True - else: + try: + return fm_core.clear_all(entity_instance_id) + except (RuntimeError, SystemError, TypeError): return False def get_faults(self, entity_instance_id): - cmd = constants.FM_CLIENT_GET_FAULTS + '"' + entity_instance_id + '"' - resp = self._run_cmd_and_get_resp(cmd) 
- data = [] - if resp[0] == "Ok": - for i in range(1, len(resp)): - alarm = self._str_to_alarm(resp[i]) - data.append(alarm) - return data - else: - return None + try: + resp = fm_core.get_by_eid(entity_instance_id) + if resp is not None: + data = [] + for i in resp: + data.append(self._str_to_alarm(i)) + return data + except (RuntimeError, SystemError, TypeError): + pass + return None def get_faults_by_id(self, alarm_id): - cmd = constants.FM_CLIENT_GET_FAULTS_BY_ID + '"' + alarm_id + '"' - resp = self._run_cmd_and_get_resp(cmd) - data = [] - if resp[0] == "Ok": - for i in range(1, len(resp)): - alarm = self._str_to_alarm(resp[i]) - data.append(alarm) - return data - else: - return None + try: + resp = fm_core.get_by_aid(alarm_id) + if resp is not None: + data = [] + for i in resp: + data.append(self._str_to_alarm(i)) + return data + except (RuntimeError, SystemError, TypeError): + pass + return None @staticmethod def _check_val(data): @@ -177,21 +169,6 @@ class FaultAPIs(object): line[constants.FM_TIMESTAMP_INDEX]) return data - @staticmethod - def _run_cmd_and_get_resp(cmd): - resp = [] - cmd = cmd.encode('utf-8') - pro = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) - output = pro.communicate()[0] - lines = output.split('\n') - for line in lines: - if line != '': - resp.append(line) - if len(resp) == 0: - resp.append("Unknown") - - return resp - @staticmethod def _check_required_attributes(data): if data.alarm_id is None: diff --git a/fm-common/centos/fm-common.spec b/fm-common/centos/fm-common.spec index 2e562ed6..0076ccb2 100644 --- a/fm-common/centos/fm-common.spec +++ b/fm-common/centos/fm-common.spec @@ -1,6 +1,7 @@ %define local_dir /usr/local %define local_bindir %{local_dir}/bin %define cgcs_doc_deploy_dir /opt/deploy/cgcs_doc +%define pythonroot /usr/lib64/python2.7/site-packages Summary: CGTS Platform Fault Management Common Package Name: fm-common @@ -15,6 +16,7 @@ BuildRequires: util-linux BuildRequires: postgresql-devel BuildRequires: 
libuuid-devel BuildRequires: python-devel +BuildRequires: python-setuptools %package -n fm-common-dev Summary: CGTS Platform Fault Management Common Package - Development files @@ -47,6 +49,7 @@ VER=%{version} MAJOR=`echo $VER | awk -F . '{print $1}'` MINOR=`echo $VER | awk -F . '{print $2}'` make MAJOR=$MAJOR MINOR=$MINOR %{?_smp_mflags} +%{__python} setup.py build %install rm -rf $RPM_BUILD_ROOT @@ -55,9 +58,18 @@ MAJOR=`echo $VER | awk -F . '{print $1}'` MINOR=`echo $VER | awk -F . '{print $2}'` make DEST_DIR=$RPM_BUILD_ROOT BIN_DIR=%{local_bindir} LIB_DIR=%{_libdir} INC_DIR=%{_includedir} MAJOR=$MAJOR MINOR=$MINOR install_non_bb +%{__python} setup.py install --root=%{buildroot} \ + --install-lib=%{pythonroot} \ + --prefix=/usr \ + --install-data=/usr/share + install -d $RPM_BUILD_ROOT/usr/bin install -m 755 fm_db_sync_event_suppression.py $RPM_BUILD_ROOT/usr/bin/fm_db_sync_event_suppression.py +# install the headers that used by fm-mgr package +install -m 644 -p -D fmConfig.h %{buildroot}%{_includedir}/fmConfig.h +install -m 644 -p -D fmLog.h %{buildroot}%{_includedir}/fmLog.h + CGCS_DOC_DEPLOY=$RPM_BUILD_ROOT/%{cgcs_doc_deploy_dir} install -d $CGCS_DOC_DEPLOY # install fmAlarm.h in CGCS_DOC_DEPLOY_DIR @@ -75,6 +87,9 @@ rm -rf $RPM_BUILD_ROOT %{_libdir}/*.so.* /usr/bin/fm_db_sync_event_suppression.py +%{pythonroot}/fm_core.so +%{pythonroot}/fm_core-*.egg-info + %files -n fm-common-dev %defattr(-,root,root,-) %{_includedir}/* diff --git a/fm-common/sources/Makefile b/fm-common/sources/Makefile index fc3b9a2d..dc495254 100755 --- a/fm-common/sources/Makefile +++ b/fm-common/sources/Makefile @@ -1,6 +1,6 @@ SRCS = fmAPI.cpp fmFile.cpp fmLog.cpp fmMsgServer.cpp fmMutex.cpp fmSocket.cpp fmThread.cpp fmTime.cpp \ fmAlarmUtils.cpp fmDb.cpp fmDbUtils.cpp fmDbAlarm.cpp fmSnmpUtils.cpp \ - fmDbEventLog.cpp fmEventSuppression.cpp + fmDbEventLog.cpp fmEventSuppression.cpp fmConfig.cpp CLI_SRCS = fm_cli.cpp OBJS = $(SRCS:.cpp=.o) CLI_OBJS = fm_cli.o @@ -9,7 +9,7 @@ INCLUDES 
= -I./ CCFLAGS = -g -O2 -Wall -Werror -fPIC LIBFMCOMMON_SO := libfmcommon.so -build: lib fmClientCli +build: lib fmClientCli .cpp.o: $(CXX) $(CCFLAGS) $(INCLUDES) $(EXTRACCFLAGS) -c $< -o $@ diff --git a/fm-common/sources/fmAlarmUtils.cpp b/fm-common/sources/fmAlarmUtils.cpp index 37813113..81c353be 100644 --- a/fm-common/sources/fmAlarmUtils.cpp +++ b/fm-common/sources/fmAlarmUtils.cpp @@ -472,7 +472,7 @@ bool fm_alarm_to_string(const SFmAlarmDataT *alarm, std::string &str) { return str.size()>0; } -bool fm_alarm_from_string(const std::string &alstr,SFmAlarmDataT *a) { +bool fm_alarm_from_string(const std::string &alstr, SFmAlarmDataT *a) { strvect_t s; str_to_vector(alstr, s); diff --git a/fm-common/sources/fmConfig.cpp b/fm-common/sources/fmConfig.cpp new file mode 100644 index 00000000..15e69d84 --- /dev/null +++ b/fm-common/sources/fmConfig.cpp @@ -0,0 +1,92 @@ +// +// Copyright (c) 2018 Wind River Systems, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// + +#include +#include +#include +#include +#include "fmAPI.h" +#include "fmLog.h" +#include "fmFile.h" +#include "fmConfig.h" +#include "fmMutex.h" +#include "fmConstants.h" +#include "fmSnmpConstants.h" +#include "fmSnmpUtils.h" + +typedef std::map configParams; + +static const char *conf = NULL; +static int config_loaded = false; + +CFmMutex & getConfMutex(){ + static CFmMutex *m = new CFmMutex; + return *m; +} + +configParams &getConfigMap(){ + static configParams conf; + return conf; +} + +void fm_conf_set_file(const char *fn){ + conf = fn; +} + +void fm_get_config_paramters(){ + CfmFile f; + std::string delimiter = "="; + std::string line, key, value; + size_t pos = 0; + + if (conf == NULL){ + FM_ERROR_LOG("The config file is not set\n"); + exit(-1); + } + + if (!f.open(conf, CfmFile::READ, false)){ + FM_ERROR_LOG("Failed to open config file: %s\n", conf); + exit(-1); + } + + while (true){ + if (!f.read_line(line)) break; + + if (line.size() == 0) continue; + + if (line[0] == '#') continue; + + pos = 
line.find(delimiter); + key = line.substr(0, pos); + value = line.erase(0, pos + delimiter.length()); + getConfigMap()[key] = value; + if (key.compare(FM_SNMP_TRAPDEST) == 0){ + set_trap_dest_list(value); + } + if (key.compare(FM_SQL_CONNECTION) != 0){ + // Don't log sql_connection, as it has a password + FM_INFO_LOG("Config key (%s), value (%s)", + key.c_str(), value.c_str()); + } + } +} + +bool fm_get_config_key(std::string &key, std::string &val){ + configParams::iterator it; + CFmMutexGuard m(getConfMutex()); + + if (!config_loaded){ + fm_get_config_paramters(); + config_loaded = true; + } + + it = getConfigMap().find(key); + if (it != getConfigMap().end()){ + val = it->second; + return true; + } + return false; +} diff --git a/fm-common/sources/fmConfig.h b/fm-common/sources/fmConfig.h new file mode 100644 index 00000000..edec9da5 --- /dev/null +++ b/fm-common/sources/fmConfig.h @@ -0,0 +1,18 @@ +// +// Copyright (c) 2018 Wind River Systems, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// + +#ifndef FMCONFIG_H_ +#define FMCONFIG_H_ + +#include + +void fm_conf_set_file(const char *fn); + +void fm_get_config_paramters(); + +bool fm_get_config_key(std::string &key, std::string &val); + +#endif /* FMCONFIG_H_ */ diff --git a/fm-common/sources/fmDbConstants.h b/fm-common/sources/fmConstants.h similarity index 91% rename from fm-common/sources/fmDbConstants.h rename to fm-common/sources/fmConstants.h index 18a865da..8508d882 100644 --- a/fm-common/sources/fmDbConstants.h +++ b/fm-common/sources/fmConstants.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2014 Wind River Systems, Inc. +// Copyright (c) 2014-2018 Wind River Systems, Inc. 
// // SPDX-License-Identifier: Apache-2.0 // @@ -16,10 +16,10 @@ #define FM_DB_TABLE_COUNT_COLUMN "count" /* Alarm table name */ -#define FM_ALARM_TABLE_NAME "i_alarm" +#define FM_ALARM_TABLE_NAME "alarm" /* Event log table name */ -#define FM_EVENT_LOG_TABLE_NAME "i_event_log" +#define FM_EVENT_LOG_TABLE_NAME "event_log" /* Event suppression table name */ #define FM_EVENT_SUPPRESSION_TABLE_NAME "event_suppression" @@ -81,11 +81,6 @@ #define FM_EVENT_SUPPRESSION_UNSUPPRESSED "unsuppressed" #define FM_EVENT_SUPPRESSION_NONE "None" -/* System table name */ -#define FM_SYSTEM_TABLE_NAME "i_system" - -#define FM_SYSTEM_NAME_COLUMN "name" -#define FM_SYSTEM_REGION_COLUMN "region_name" #define FM_ENTITY_ROOT_KEY "system=" #define FM_ENTITY_REGION_KEY "region=" @@ -93,6 +88,10 @@ /* config keys */ #define FM_SQL_CONNECTION "sql_connection" #define FM_EVENT_LOG_MAX_SIZE "event_log_max_size" +#define FM_SYSTEM_NAME "system_name" +#define FM_REGION_NAME "region_name" +#define FM_DEBUG_FLAG "debug" +#define FM_STRING_TRUE "True" #define CLEAR_ALL_REASON_TEXT "System initiated hierarchical alarm clear" diff --git a/fm-common/sources/fmDb.cpp b/fm-common/sources/fmDb.cpp index b16c2d44..cd963c80 100644 --- a/fm-common/sources/fmDb.cpp +++ b/fm-common/sources/fmDb.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2016 Wind River Systems, Inc. +// Copyright (c) 2016-2018 Wind River Systems, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -16,7 +16,7 @@ #include "fmAlarmUtils.h" #include "fmDbUtils.h" #include "fmDb.h" -#include "fmDbConstants.h" +#include "fmConstants.h" #include "fmThread.h" @@ -62,11 +62,7 @@ bool CFmDBSession::connect(const char *uri){ m_conn.uri = uri; val = get_parameter_status("standard_conforming_strings"); - //FM_INFO_LOG("connect: server standard_conforming_strings parameter: %s", - // val ? val : "unavailable"); m_conn.equote = (val && (0 == strcmp("off", val))); - //FM_INFO_LOG("connect: server requires E'' quotes: %s", m_conn.equote ? 
"YES" : "NO"); - m_conn.server_version = PQserverVersion(m_conn.pgconn); m_conn.protocol = PQprotocolVersion(m_conn.pgconn); m_conn.encoding = get_parameter_status("client_encoding"); @@ -132,7 +128,7 @@ bool CFmDBSession::query(const char *db_cmd,fm_db_result_t & result) { return true; } -bool CFmDBSession::cmd(const char *db_cmd){ +bool CFmDBSession::cmd(const char *db_cmd, bool check_row){ PGresult *res; bool rc = true; @@ -147,7 +143,7 @@ bool CFmDBSession::cmd(const char *db_cmd){ FM_ERROR_LOG("Failed to execute (%s) (%s)", db_cmd, PQresultErrorMessage(res)); rc = false; } - if (rc){ + if (rc && check_row){ int row = atoi(PQcmdTuples(res)); FM_DEBUG_LOG("SQL command returned successful: %d rows affected.\n", row); if (row < 1) rc = false; diff --git a/fm-common/sources/fmDb.h b/fm-common/sources/fmDb.h index b95c1f8b..945022aa 100644 --- a/fm-common/sources/fmDb.h +++ b/fm-common/sources/fmDb.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2014 Wind River Systems, Inc. +// Copyright (c) 2014-2018 Wind River Systems, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -53,7 +53,7 @@ public: bool reconnect(); bool query(const char *db_cmd,fm_db_result_t & result); - bool cmd(const char *db_cmd); + bool cmd(const char *db_cmd, bool check_row=true); bool params_cmd(fm_db_util_sql_params & sql_params); PGconn* get_pgconn(){ diff --git a/fm-common/sources/fmDbAlarm.cpp b/fm-common/sources/fmDbAlarm.cpp index f08612c9..e59b6880 100644 --- a/fm-common/sources/fmDbAlarm.cpp +++ b/fm-common/sources/fmDbAlarm.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2014 Wind River Systems, Inc. +// Copyright (c) 2014-2018 Wind River Systems, Inc. 
// // SPDX-License-Identifier: Apache-2.0 // @@ -11,7 +11,7 @@ #include "fmLog.h" #include "fmDbAlarm.h" #include "fmAlarmUtils.h" -#include "fmDbConstants.h" +#include "fmConstants.h" #include "fmDbUtils.h" typedef std::map itos_t; @@ -319,7 +319,7 @@ bool CFmDbAlarmOperation::get_all_alarms(CFmDBSession &sess, SFmAlarmDataT **ala if (!get_alarms(sess, NULL, res)) return false; - std::string sname = fm_db_util_get_system_name(sess); + std::string sname = fm_db_util_get_system_name(); unsigned int found_num_alarms = res.size(); @@ -436,7 +436,7 @@ bool CFmDbAlarmOperation::get_all_history_alarms(CFmDBSession &sess, SFmAlarmDat *alarms = NULL; if (!get_history(sess,res)) return false; - std::string sname = fm_db_util_get_system_name(sess); + std::string sname = fm_db_util_get_system_name(); unsigned int found_num_alarms = res.size(); diff --git a/fm-common/sources/fmDbAlarm.h b/fm-common/sources/fmDbAlarm.h index 9c6ee25b..56b1f448 100644 --- a/fm-common/sources/fmDbAlarm.h +++ b/fm-common/sources/fmDbAlarm.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2014 Wind River Systems, Inc. +// Copyright (c) 2014-2018 Wind River Systems, Inc. 
// // SPDX-License-Identifier: Apache-2.0 // @@ -13,7 +13,7 @@ #include #include "fmAPI.h" -#include "fmDbConstants.h" +#include "fmConstants.h" #include "fmDb.h" class CFmDbAlarm { diff --git a/fm-common/sources/fmDbEventLog.cpp b/fm-common/sources/fmDbEventLog.cpp index 50e7f1ab..4964c22f 100644 --- a/fm-common/sources/fmDbEventLog.cpp +++ b/fm-common/sources/fmDbEventLog.cpp @@ -12,7 +12,7 @@ #include "fmDbAlarm.h" #include "fmDbEventLog.h" #include "fmAlarmUtils.h" -#include "fmDbConstants.h" +#include "fmConstants.h" #include "fmDbUtils.h" typedef std::map itos_t; @@ -291,7 +291,7 @@ bool CFmDbEventLogOperation::get_all_event_logs(CFmDBSession &sess, SFmAlarmData if (!get_event_logs(sess, res)) return false; - std::string sname = fm_db_util_get_system_name(sess); + std::string sname = fm_db_util_get_system_name(); unsigned int found_num_logs = res.size(); diff --git a/fm-common/sources/fmDbEventLog.h b/fm-common/sources/fmDbEventLog.h index 15ae2a79..f6aa7f85 100644 --- a/fm-common/sources/fmDbEventLog.h +++ b/fm-common/sources/fmDbEventLog.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2016 Wind River Systems, Inc. +// Copyright (c) 2016-2018 Wind River Systems, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -13,7 +13,7 @@ #include #include "fmAPI.h" -#include "fmDbConstants.h" +#include "fmConstants.h" #include "fmDb.h" typedef std::map itos_t; diff --git a/fm-common/sources/fmDbUtils.cpp b/fm-common/sources/fmDbUtils.cpp index d05ccaa1..90160f3a 100644 --- a/fm-common/sources/fmDbUtils.cpp +++ b/fm-common/sources/fmDbUtils.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2014 Wind River Systems, Inc. +// Copyright (c) 2014-2018 Wind River Systems, Inc. 
// // SPDX-License-Identifier: Apache-2.0 // @@ -25,29 +25,18 @@ #include "fmDb.h" #include "fmDbUtils.h" #include "fmDbAPI.h" -#include "fmDbConstants.h" +#include "fmConstants.h" #include "fmAlarmUtils.h" +#include "fmConfig.h" -typedef std::map configParams; - -static const char *conf = NULL; static pthread_mutex_t mutex = PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP; -CFmMutex & getConfMutex(){ - static CFmMutex *m = new CFmMutex; - return *m; -} - -configParams &getConfigMap(){ - static configParams conf; - return conf; -} void FM_DB_UT_NAME_VAL( - std::string &result, - const std::string &lhs, const std::string &rhs) { + std::string &result, + const std::string &lhs, const std::string &rhs) { result+= lhs; result+= " = '"; result+=rhs; @@ -55,8 +44,8 @@ void FM_DB_UT_NAME_VAL( } void FM_DB_UT_NAME_PARAM( - std::string &result, - const std::string &lhs, const std::string &rhs) { + std::string &result, + const std::string &lhs, const std::string &rhs) { result+= lhs; result+= "="; result+=rhs; @@ -90,40 +79,6 @@ static int get_oldest_id(CFmDBSession &sess, const char* db_table){ return id; } -static void get_config_parameters(){ - CfmFile f; - std::string delimiter = "="; - std::string line, key, value; - size_t pos = 0; - - if (conf == NULL){ - FM_ERROR_LOG("The config file is not set\n"); - exit(-1); - } - - if (!f.open(conf, CfmFile::READ, false)){ - FM_ERROR_LOG("Failed to open config file: %s\n", conf); - exit(-1); - } - - while (true){ - if (!f.read_line(line)) break; - - if (line.size() == 0) continue; - - if (line[0] == '#') continue; - - pos = line.find(delimiter); - key = line.substr(0, pos); - value = line.erase(0, pos + delimiter.length()); - getConfigMap()[key] = value; - if (key.compare("sql_connection") != 0){ - // Don't log sql_connection, as it has a password - FM_INFO_LOG("Config key (%s), value (%s)", - key.c_str(), value.c_str()); - } - } -} static inline CFmDBSession & FmDbSessionFromHandle(TFmAlarmSessionT *p){ return *((CFmDBSession*)p); @@ 
-189,7 +144,7 @@ int fm_db_util_string_to_int(std::string val){ } void fm_db_util_make_timestamp_string(std::string &tstr, FMTimeT tm, - bool snmp){ + bool snmp){ struct timespec ts; if (tm != 0){ ts.tv_sec = tm / 1000000; @@ -517,28 +472,6 @@ bool fm_db_util_build_sql_delete_all(const char* db_table, const char *id, return true; } -void fm_db_util_set_conf_file(const char *fn){ - conf = fn; -} - -bool fm_db_util_get_config(std::string &key, std::string &val){ - - configParams::iterator it; - static int loaded = false; - CFmMutexGuard m(getConfMutex()); - - if (!loaded){ - get_config_parameters(); - loaded = true; - } - - it = getConfigMap().find(key); - if (it != getConfigMap().end()){ - val = it->second; - return true; - } - return false; -} int & fm_get_alarm_history_max_size(){ static int max_size = 0; @@ -546,7 +479,7 @@ int & fm_get_alarm_history_max_size(){ if (max_size == 0){ std::string val; std::string key = FM_EVENT_LOG_MAX_SIZE; - if (fm_db_util_get_config(key, val)){ + if (fm_get_config_key(key, val)){ max_size = fm_db_util_string_to_int(val); }else{ FM_ERROR_LOG("Fail to get config value for (%s)\n", key.c_str()); @@ -561,7 +494,7 @@ int & fm_get_log_max_size(){ if (max_size == 0){ std::string val; std::string key = FM_EVENT_LOG_MAX_SIZE; - if (fm_db_util_get_config(key, val)){ + if (fm_get_config_key(key, val)){ max_size = fm_db_util_string_to_int(val); }else{ FM_ERROR_LOG("Fail to get config value for (%s)\n", key.c_str()); @@ -570,34 +503,21 @@ int & fm_get_log_max_size(){ return max_size; } -std::string fm_db_util_get_system_name(CFmDBSession &sess){ - fm_db_result_t res; - std::string cmd; +std::string fm_db_util_get_system_info(const std::string prefix, std::string key){ + std::string val; std::string name = ""; - - fm_db_util_build_sql_query(FM_SYSTEM_TABLE_NAME, NULL, cmd); - if (sess.query(cmd.c_str(), res)){ - if (res.size() > 0){ - std::map entry = res[0]; - name = FM_ENTITY_ROOT_KEY + entry[FM_SYSTEM_NAME_COLUMN]; - } + if 
(fm_get_config_key(key, val)){ + name = prefix + val; } return name; } -std::string fm_db_util_get_region_name(CFmDBSession &sess){ - fm_db_result_t res; - std::string cmd; - std::string name = ""; +std::string fm_db_util_get_system_name(){ + return fm_db_util_get_system_info(FM_ENTITY_ROOT_KEY, FM_SYSTEM_NAME); +} - fm_db_util_build_sql_query(FM_SYSTEM_TABLE_NAME, NULL, cmd); - if (sess.query(cmd.c_str(), res)){ - if (res.size() > 0){ - std::map entry = res[0]; - name = FM_ENTITY_REGION_KEY + entry[FM_SYSTEM_REGION_COLUMN]; - } - } - return name; +std::string fm_db_util_get_region_name(){ + return fm_db_util_get_system_info(FM_ENTITY_REGION_KEY, FM_REGION_NAME); } bool fm_db_util_get_row_counts(CFmDBSession &sess, @@ -656,13 +576,12 @@ bool fm_db_util_get_next_log_id(CFmDBSession &sess, int &id){ return true; } -bool fm_db_util_create_session(CFmDBSession **sess){ +bool fm_db_util_create_session(CFmDBSession **sess, std::string key){ TFmAlarmSessionT handle; const char *db_conn = NULL; std::string val; - std::string key = FM_SQL_CONNECTION; - if (fm_db_util_get_config(key, val) != true){ + if (fm_get_config_key(key, val) != true){ FM_ERROR_LOG("Failed to get config for key: (%s)\n", key.c_str()); return false; } @@ -682,34 +601,34 @@ bool fm_db_util_sync_event_suppression(void){ std::string val; std::string key = FM_SQL_CONNECTION; - if (fm_db_util_get_config(key, val) != true){ - FM_ERROR_LOG("Failed to get config for key: (%s)\n", key.c_str()); + if (fm_get_config_key(key, val) != true){ + FM_ERROR_LOG("NEW Failed to get config for key: (%s)\n", key.c_str()); return false; } db_conn = val.c_str(); - FILE* file; - int argc; - char * argv[2]; + FILE* file; + int argc; + char * argv[2]; - FM_INFO_LOG("Starting event suppression synchronization...\n"); + FM_INFO_LOG("Starting event suppression synchronization...\n"); - argc = 2; - argv[0] = (char*)FM_DB_SYNC_EVENT_SUPPRESSION; - argv[1] = (char*)db_conn; + argc = 2; + argv[0] = (char*)FM_DB_SYNC_EVENT_SUPPRESSION; + 
argv[1] = (char*)db_conn; - Py_SetProgramName(argv[0]); - Py_Initialize(); - PySys_SetArgv(argc, argv); - file = fopen(FM_DB_SYNC_EVENT_SUPPRESSION,"r"); - PyRun_SimpleFile(file, FM_DB_SYNC_EVENT_SUPPRESSION); - fclose(file); - Py_Finalize(); + Py_SetProgramName(argv[0]); + Py_Initialize(); + PySys_SetArgv(argc, argv); + file = fopen(FM_DB_SYNC_EVENT_SUPPRESSION,"r"); + PyRun_SimpleFile(file, FM_DB_SYNC_EVENT_SUPPRESSION); + fclose(file); + Py_Finalize(); - FM_INFO_LOG("Completed event suppression synchronization.\n"); + FM_INFO_LOG("Completed event suppression synchronization.\n"); - return return_value; + return return_value; } diff --git a/fm-common/sources/fmDbUtils.h b/fm-common/sources/fmDbUtils.h index 5c6be631..b9f7284f 100644 --- a/fm-common/sources/fmDbUtils.h +++ b/fm-common/sources/fmDbUtils.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2014 Wind River Systems, Inc. +// Copyright (c) 2014-2018 Wind River Systems, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -73,16 +73,15 @@ bool fm_db_util_build_sql_delete_all(const char* db_table, bool fm_db_util_get_row_counts(CFmDBSession &sess, const char* db_table, int &counts); -bool fm_db_util_create_session(CFmDBSession **sess); +bool fm_db_util_create_session(CFmDBSession **sess, + std::string key=FM_SQL_CONNECTION); -std::string fm_db_util_get_system_name(CFmDBSession &sess); +std::string fm_db_util_get_system_name(); -std::string fm_db_util_get_region_name(CFmDBSession &sess); +std::string fm_db_util_get_region_name(); void fm_db_util_set_conf_file(const char *fn); -bool fm_db_util_get_config(std::string &key, std::string &val); - bool fm_db_util_get_next_log_id(CFmDBSession &sess, int &id); std::string fm_db_util_int_to_string(int val); diff --git a/fm-common/sources/fmEventSuppression.cpp b/fm-common/sources/fmEventSuppression.cpp index b87370c0..788eeb5c 100644 --- a/fm-common/sources/fmEventSuppression.cpp +++ b/fm-common/sources/fmEventSuppression.cpp @@ -1,13 +1,12 @@ // -// Copyright (c) 2016 Wind 
River Systems, Inc. +// Copyright (c) 2016-2018 Wind River Systems, Inc. // // SPDX-License-Identifier: Apache-2.0 -// #include #include -#include "fmDbConstants.h" +#include "fmConstants.h" #include "fmLog.h" #include "fmDbAlarm.h" #include "fmEventSuppression.h" @@ -73,6 +72,7 @@ bool CFmEventSuppressionOperation::set_table_notify_listen(CFmDBSession &sess){ sql = "SELECT rulename FROM pg_rules WHERE rulename='watch_event_supression'"; if ((sess.query(sql.c_str(), rule_name)) != true){ + FM_DEBUG_LOG("Failed to query the existing rule"); return false; } @@ -84,7 +84,8 @@ bool CFmEventSuppressionOperation::set_table_notify_listen(CFmDBSession &sess){ sql += FM_EVENT_SUPPRESSION_TABLE_NAME; sql += ")"; - if (sess.cmd(sql.c_str()) != true){ + if (sess.cmd(sql.c_str(), false) != true){ + FM_INFO_LOG("Failed to set rule CMD: (%s)", sql.c_str()); return false; } @@ -95,10 +96,6 @@ bool CFmEventSuppressionOperation::set_table_notify_listen(CFmDBSession &sess){ sql += FM_EVENT_SUPPRESSION_TABLE_NAME; FM_DEBUG_LOG("CMD:(%s)\n", sql.c_str()); - sess.cmd(sql.c_str()); // TODO: sess.cmd() returns false since no row affected by LISTEN command -/* if (sess.cmd(sql.c_str()) != true){ - return false; - } */ - - return true; + // no row affected by LISTEN command + return sess.cmd(sql.c_str(), false); } diff --git a/fm-common/sources/fmEventSuppression.h b/fm-common/sources/fmEventSuppression.h index 5a8297e8..fe8ec1a4 100644 --- a/fm-common/sources/fmEventSuppression.h +++ b/fm-common/sources/fmEventSuppression.h @@ -9,15 +9,6 @@ #define FMEVENTSUPPRESSION_H_ -/* -#include -#include -#include -#include - -#include "fmAPI.h" -#include "fmDbConstants.h" -*/ #include "fmDb.h" class CFmEventSuppressionOperation { diff --git a/fm-common/sources/fmLog.cpp b/fm-common/sources/fmLog.cpp index b867788e..eea08533 100644 --- a/fm-common/sources/fmLog.cpp +++ b/fm-common/sources/fmLog.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2014 Wind River Systems, Inc. 
+// Copyright (c) 2014-2018 Wind River Systems, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -7,12 +7,14 @@ #include #include #include +#include +#include + #include "fmLog.h" #include "fmDbAlarm.h" #include "fmDbEventLog.h" - -#include -#include +#include "fmConfig.h" +#include "fmConstants.h" static pthread_mutex_t mutex = PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP; @@ -24,6 +26,13 @@ void fmLoggingInit() { openlog(NULL,LOG_CONS | LOG_NDELAY,LOG_LOCAL1); setlogmask(LOG_UPTO (LOG_INFO)); } + std::string val; + std::string key = FM_DEBUG_FLAG; + if ((fm_get_config_key(key, val)) && (val.compare("True") == 0)){ + setlogmask(LOG_UPTO (LOG_DEBUG)); + } else { + setlogmask(LOG_UPTO (LOG_INFO)); + } has_inited=true; } @@ -36,11 +45,6 @@ void fmLogMsg(int level, const char *data,...){ va_end(ap); } -bool fmLogFileInit(){ - fmLoggingInit(); - return true; -} - // formats event into json form for logging static char * formattedEvent(CFmDbEventLog::data_type event_map, char * output, int outputSize) { int bufLen = 1024; diff --git a/fm-common/sources/fmLog.h b/fm-common/sources/fmLog.h index d1f5ad12..1d5ec0a2 100644 --- a/fm-common/sources/fmLog.h +++ b/fm-common/sources/fmLog.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2014 Wind River Systems, Inc. +// Copyright (c) 2014-2018 Wind River Systems, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -68,9 +68,6 @@ bool fmLogFileInit(); void fmLogAddEventLog(SFmAlarmDataT * data, bool is_event_suppressed); -//void fmLogAddEventLog(SFmAlarmDataT * data); - - #endif diff --git a/fm-common/sources/fmMsgServer.cpp b/fm-common/sources/fmMsgServer.cpp index 002dd730..921ea8da 100644 --- a/fm-common/sources/fmMsgServer.cpp +++ b/fm-common/sources/fmMsgServer.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Wind River Systems, Inc. +// Copyright (c) 2017-2018 Wind River Systems, Inc. 
// // SPDX-License-Identifier: Apache-2.0 // @@ -40,8 +40,9 @@ #include "fmSnmpUtils.h" #include "fmDbUtils.h" #include "fmDbEventLog.h" -#include "fmDbConstants.h" +#include "fmConstants.h" #include "fmEventSuppression.h" +#include "fmConfig.h" #define FM_UUID_LENGTH 36 @@ -125,7 +126,7 @@ static bool dequeue_get(sFmGetReq &req){ return true; } -void create_db_log(CFmDBSession &sess, sFmJobReq &req){ +void create_db_log(sFmJobReq &req){ SFmAlarmDataT alarm = req.data; if (alarm.alarm_state != FM_ALARM_STATE_MSG){ @@ -135,7 +136,7 @@ void create_db_log(CFmDBSession &sess, sFmJobReq &req){ } fmLogAddEventLog(&alarm, false); - fm_snmp_util_gen_trap(sess, FM_ALARM_MESSAGE, alarm); + fm_snmp_util_gen_trap(FM_ALARM_MESSAGE, alarm); } void get_db_alarm(CFmDBSession &sess, sFmGetReq &req, void *context){ @@ -293,7 +294,7 @@ void fm_handle_job_request(CFmDBSession &sess, sFmJobReq &req){ //check if it is a customer log request if (req.type == FM_CUSTOMER_LOG) { - return create_db_log(sess,req); + return create_db_log(req); } // check to see if there are any alarms need to be masked/unmasked @@ -317,7 +318,7 @@ void fm_handle_job_request(CFmDBSession &sess, sFmJobReq &req){ req.data.alarm_id); } else { if (!is_event_suppressed) - fm_snmp_util_gen_trap(sess, req.type, req.data); + fm_snmp_util_gen_trap(req.type, req.data); } fmLogAddEventLog(&req.data, is_event_suppressed); @@ -572,14 +573,10 @@ EFmErrorT fm_server_create(const char *fn) { hints.ai_addr = NULL; hints.ai_next = NULL; + fm_conf_set_file(fn); + fmLoggingInit(); - if (!fmLogFileInit()){ - exit(-1); - } - - fm_db_util_set_conf_file(fn); - if (!fm_db_util_sync_event_suppression()){ exit(-1); } @@ -704,7 +701,7 @@ bool fm_handle_event_suppress_changes(CFmDBSession &sess){ } SFmAlarmDataT *alarm = NULL; - fm_snmp_util_gen_trap(sess, FM_WARM_START, *alarm); + fm_snmp_util_gen_trap(FM_WARM_START, *alarm); return true; } diff --git a/fm-common/sources/fmSnmpConstants.h b/fm-common/sources/fmSnmpConstants.h index 
8cc3981a..061d11c2 100644 --- a/fm-common/sources/fmSnmpConstants.h +++ b/fm-common/sources/fmSnmpConstants.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2014 Wind River Systems, Inc. +// Copyright (c) 2017-2018 Wind River Systems, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -14,12 +14,13 @@ #define FM_CUSTOMER_LOG 10 -/* Trap Destination table name */ -#define FM_TRAPDEST_TABLE_NAME "i_trap_destination" +/* Trap Destination definitions */ -#define FM_TRAPDEST_IP_COLUMN "ip_address" +#define FM_SNMP_TRAPDEST "trap_destinations" -#define FM_TRAPDEST_COMM_COLUMN "community" +#define FM_TRAPDEST_IP "ip_address" + +#define FM_TRAPDEST_COMM "community" /* MIB Trap definitions */ const std::string WRS_ALARM_MIB = "WRS-ALARM-MIB"; diff --git a/fm-common/sources/fmSnmpUtils.cpp b/fm-common/sources/fmSnmpUtils.cpp index 0fd311f7..a88066a9 100644 --- a/fm-common/sources/fmSnmpUtils.cpp +++ b/fm-common/sources/fmSnmpUtils.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2014 Wind River Systems, Inc. +// Copyright (c) 2014-2018 Wind River Systems, Inc. 
// // SPDX-License-Identifier: Apache-2.0 // @@ -11,6 +11,7 @@ #include #include #include +#include #include "fmDbAPI.h" #include "fmFile.h" @@ -21,6 +22,7 @@ #include "fmDbUtils.h" #include "fmSnmpConstants.h" #include "fmSnmpUtils.h" +#include "fmConfig.h" typedef std::map int_to_objtype; @@ -28,6 +30,11 @@ static int_to_objtype objtype_map; static pthread_mutex_t mutex = PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP; +fm_db_result_t &getTrapDestList(){ + static fm_db_result_t trap_dest_list; + return trap_dest_list; +} + static void add_to_table(int t, std::string objtype, int_to_objtype &tbl) { tbl[t]=objtype; } @@ -72,14 +79,38 @@ static std::string get_trap_objtype(int type){ init_objtype_table(); return objtype_map[type]; } -static bool get_trap_dest_list(CFmDBSession &sess,fm_db_result_t & res){ - std::string cmd; - fm_db_util_build_sql_query(FM_TRAPDEST_TABLE_NAME, NULL, cmd); - return sess.query(cmd.c_str(), res); +static void add_to_list(std::vector &trap_strings) { + std::string delimiter = " "; + + std::vector::iterator it = trap_strings.begin(); + std::vector::iterator end = trap_strings.end(); + getTrapDestList().clear(); + for (; it != end; ++it){ + size_t pos = 0; + fm_db_single_result_t entry; + pos = (*it).find(delimiter); + entry[FM_TRAPDEST_IP] = (*it).substr(0, pos); + entry[FM_TRAPDEST_COMM] = (*it).erase(0, pos + delimiter.length()); + getTrapDestList().push_back(entry); + } } -static std::string format_trap_cmd(CFmDBSession &sess, int type, SFmAlarmDataT &data, +void set_trap_dest_list(std::string value){ + + std::vector entries; + std::istringstream f(value); + std::string s; + while (getline(f, s, ',')) { + std::cout << s << std::endl; + FM_INFO_LOG("Add entry: (%s)", s.c_str()); + entries.push_back(s); + } + add_to_list(entries); + FM_INFO_LOG("Set trap entries: (%d)", getTrapDestList().size()); +} + +static std::string format_trap_cmd(int type, SFmAlarmDataT &data, std::string &ip, std::string &comm){ std::string cmd; std::string objtype; @@ 
-140,28 +171,29 @@ static std::string format_trap_cmd(CFmDBSession &sess, int type, SFmAlarmDataT & return cmd; } -bool fm_snmp_util_gen_trap(CFmDBSession &sess, int type, SFmAlarmDataT &data) { + +bool fm_snmp_util_gen_trap(int type, SFmAlarmDataT &data) { bool rc = true; fm_buff_t cmdbuff; fm_db_result_t res; std::string cmd, eid; - if (!get_trap_dest_list(sess,res)) return false; + res = getTrapDestList(); - if (&data != NULL) { - eid.assign(data.entity_instance_id); - std::string region_name = fm_db_util_get_region_name(sess); - std::string sys_name = fm_db_util_get_system_name(sess); - if (sys_name.length() != 0){ - eid = sys_name + "."+ eid; - } - if (region_name.length() != 0){ - eid = region_name + "."+ eid; - } - strncpy(data.entity_instance_id, eid.c_str(), - sizeof(data.entity_instance_id)-1); - } + if (&data != NULL) { + eid.assign(data.entity_instance_id); + std::string region_name = fm_db_util_get_region_name(); + std::string sys_name = fm_db_util_get_system_name(); + if (sys_name.length() != 0){ + eid = sys_name + "."+ eid; + } + if (region_name.length() != 0){ + eid = region_name + "."+ eid; + } + strncpy(data.entity_instance_id, eid.c_str(), + sizeof(data.entity_instance_id)-1); + } fm_db_result_t::iterator it = res.begin(); fm_db_result_t::iterator end = res.end(); @@ -169,9 +201,9 @@ bool fm_snmp_util_gen_trap(CFmDBSession &sess, int type, SFmAlarmDataT &data) { for (; it != end; ++it){ memset(&(cmdbuff[0]), 0, cmdbuff.size()); cmd.clear(); - std::string ip = (*it)[FM_TRAPDEST_IP_COLUMN]; - std::string comm = (*it)[FM_TRAPDEST_COMM_COLUMN]; - cmd = format_trap_cmd(sess,type, data, ip, comm); + std::string ip = (*it)[FM_TRAPDEST_IP]; + std::string comm = (*it)[FM_TRAPDEST_COMM]; + cmd = format_trap_cmd(type, data, ip, comm); //FM_INFO_LOG("run cmd: %s\n", cmd.c_str()); char *pline = &(cmdbuff[0]); @@ -190,42 +222,17 @@ bool fm_snmp_util_gen_trap(CFmDBSession &sess, int type, SFmAlarmDataT &data) { } static bool 
fm_snmp_get_db_connection(std::string &connection){ - CfmFile f; - const char *fn = "/etc/fm.conf"; - std::string sql_key = FM_SQL_CONNECTION; - std::string delimiter = "="; - std::string line, key, value; - size_t pos = 0; + const char *fn = "/etc/fm/fm.conf"; + std::string key = FM_SQL_CONNECTION; - if (!f.open(fn, CfmFile::READ, false)){ - FM_ERROR_LOG("Failed to open config file: %s\n", fn); - exit (-1); - } - - while (true){ - if (!f.read_line(line)) break; - - if (line.size() == 0) continue; - - pos = line.find(delimiter); - key = line.substr(0, pos); - if (key == sql_key){ - value = line.erase(0, pos + delimiter.length()); - // Don't log sql_connection, as it has a password - //FM_DEBUG_LOG("Found it: (%s)\n", value.c_str()); - connection = value; - return true; - } - } - - return false;; + fm_conf_set_file(fn); + return fm_get_config_key(key, connection); } extern "C" { bool fm_snmp_util_create_session(TFmAlarmSessionT *handle, const char* db_conn){ - std::string key = FM_SQL_CONNECTION; std::string conn; CFmDBSession *sess = new CFmDBSession; if (sess==NULL) return false;; diff --git a/fm-common/sources/fmSnmpUtils.h b/fm-common/sources/fmSnmpUtils.h index e40573b5..3e6856e9 100644 --- a/fm-common/sources/fmSnmpUtils.h +++ b/fm-common/sources/fmSnmpUtils.h @@ -12,6 +12,8 @@ #include "fmAPI.h" #include "fmDb.h" -bool fm_snmp_util_gen_trap(CFmDBSession &sess, int type, SFmAlarmDataT &data); +bool fm_snmp_util_gen_trap(int type, SFmAlarmDataT &data); + +void set_trap_dest_list(std::string value); #endif diff --git a/fm-common/sources/fm_db_sync_event_suppression.py b/fm-common/sources/fm_db_sync_event_suppression.py index 7afc6847..68bf464a 100755 --- a/fm-common/sources/fm_db_sync_event_suppression.py +++ b/fm-common/sources/fm_db_sync_event_suppression.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright (c) 2016 Wind River Systems, Inc. +# Copyright (c) 2016-2018 Wind River Systems, Inc. 
# # SPDX-License-Identifier: Apache-2.0 # @@ -40,13 +40,13 @@ class EventSuppression(Base): class ialarm(Base): - __tablename__ = 'i_alarm' + __tablename__ = 'alarm' id = Column(Integer, primary_key=True, nullable=False) alarm_id = Column('alarm_id', String(255), index=True) class event_log(Base): - __tablename__ = 'i_event_log' + __tablename__ = 'event_log' id = Column(Integer, primary_key=True, nullable=False) event_log_id = Column('event_log_id', String(255), index=True) state = Column(String(255)) diff --git a/fm-common/sources/fm_python_mod_main.cpp b/fm-common/sources/fm_python_mod_main.cpp new file mode 100644 index 00000000..acfc0769 --- /dev/null +++ b/fm-common/sources/fm_python_mod_main.cpp @@ -0,0 +1,311 @@ +// +// Copyright (c) 2018 Wind River Systems, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// + + +#include +#include +#include "fmAPI.h" +#include "fmAlarmUtils.h" + + +static const size_t DEF_MAX_ALARMS (1000); +static const size_t MAXSTRINGSIZE (500); +static PyObject *logging = NULL; + +enum { error, warning, info, debug, max_level }; + + +#define LOG_MSG(level,data,...) \ + log_msg(level, "fm_python_extension: "\ + data, ## __VA_ARGS__ ) + +#define ERROR_LOG(data,...) \ + LOG_MSG(error, data, ## __VA_ARGS__) + +#define WARNING_LOG(data,...) \ + LOG_MSG(warning, data, ## __VA_ARGS__) + +#define INFO_LOG(data,...) \ + LOG_MSG(info, data, ## __VA_ARGS__) + +#define DEBUG_LOG(data,...) \ + LOG_MSG(debug, data, ## __VA_ARGS__) + + +static void log_msg(int type, const char *data,...) 
+{ + static PyObject *str = NULL; + const char* methods[] = {"error", "warning", "info", "debug"}; + + if (logging == NULL) { + logging = PyImport_ImportModuleNoBlock("logging"); + if (logging == NULL) { + PyErr_SetString(PyExc_ImportError, + "Could not import python module 'logging'"); + } + } + + va_list ap; + char buff[MAXSTRINGSIZE]; + va_start(ap, data ); + vsnprintf(buff, sizeof(buff), data, ap); + va_end(ap); + + str = Py_BuildValue((char *)"s", buff); + + if (type < max_level) { + PyObject_CallMethod(logging, (char *)methods[type], (char *)"O", str); + } + + Py_DECREF(str); +} + +static PyObject * _fm_set(PyObject * self, PyObject *args) { + + SFmAlarmDataT alm_data; + std::string alarm; + fm_uuid_t tmp_uuid; + const char *alm_str; + EFmErrorT rc; + + if (!PyArg_ParseTuple(args, "s", &alm_str)) { + ERROR_LOG("Failed to parse args."); + Py_RETURN_NONE; + } + + alarm.assign(alm_str); + if (!fm_alarm_from_string(alarm, &alm_data)) { + ERROR_LOG("Failed to convert string to alarm."); + Py_RETURN_NONE; + } + + rc = fm_set_fault(&alm_data, &tmp_uuid); + if (rc == FM_ERR_OK) { + return PyString_FromString(&(tmp_uuid[0])); + } + + if (rc == FM_ERR_NOCONNECT){ + // when the fm-manager process has not been started by SM + WARNING_LOG("Failed to connect to FM manager"); + } else { + ERROR_LOG("Failed to generate an alarm: (%s) (%s)", + alm_data.alarm_id, alm_data.entity_instance_id); + } + + Py_RETURN_NONE; +} + +static PyObject * _fm_get(PyObject * self, PyObject *args) { + + const char *filter; + std::string alm_str, filter_str; + AlarmFilter af; + SFmAlarmDataT ad; + EFmErrorT rc; + + if (!PyArg_ParseTuple(args, "s", &filter)) { + ERROR_LOG("Failed to parse args"); + Py_RETURN_NONE; + } + + filter_str.assign(filter); + if (!fm_alarm_filter_from_string(filter_str, &af)) { + ERROR_LOG("Invalid alarm filter: (%s)", filter_str.c_str()); + Py_RETURN_NONE; + } + + rc = fm_get_fault(&af,&ad); + if (rc == FM_ERR_OK) { + fm_alarm_to_string(&ad,alm_str); + return 
PyString_FromString(alm_str.c_str()); + } + + if (rc == FM_ERR_ENTITY_NOT_FOUND) { + DEBUG_LOG("Alarm id (%s), Entity id:(%s) not found", + af.alarm_id, af.entity_instance_id); + } else if (rc == FM_ERR_NOCONNECT) { + WARNING_LOG("Failed to connect to FM manager"); + } else { + ERROR_LOG("Failed to get alarm by filter: (%s) (%s), error code: (%d)", + af.alarm_id, af.entity_instance_id, rc); + } + Py_RETURN_NONE; +} + + +static PyObject * _fm_get_by_aid(PyObject * self, PyObject *args, PyObject* kwargs) { + const char *aid; + fm_alarm_id alm_id; + unsigned int max = DEF_MAX_ALARMS; + char* keywords[] = {"alarm_id", "max", (char*)NULL}; + + memset(alm_id, 0 , sizeof(alm_id)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s|i", keywords, &aid, &max)) { + ERROR_LOG("Failed to parse args"); + Py_RETURN_NONE; + } + strncpy(alm_id, aid, sizeof(alm_id)-1); + + std::vector< SFmAlarmDataT > lst; + try { + lst.resize(max); + } catch(...) { + ERROR_LOG("Failed to allocate memory"); + Py_RETURN_NONE; + } + unsigned int max_alarms_to_get = max; + EFmErrorT rc = fm_get_faults_by_id(&alm_id, &(lst[0]), &max_alarms_to_get); + if (rc == FM_ERR_OK) { + PyObject *__lst = PyList_New(0); + for ( size_t ix = 0 ; ix < max_alarms_to_get ; ++ix ) { + std::string s; + fm_alarm_to_string(&lst[ix],s); + if (s.size() > 0) { + if (PyList_Append(__lst, PyString_FromString(s.c_str())) != 0) { + ERROR_LOG("Failed to append alarm to the list"); + } + } + } + /* python will garbage collect if the reference count is correct + (it should be 1 at this point) */ + return __lst; + } + + if (rc == FM_ERR_ENTITY_NOT_FOUND) { + DEBUG_LOG("No alarm found for alarm id (%s)", alm_id); + } else if (rc == FM_ERR_NOCONNECT) { + WARNING_LOG("Failed to connect to FM manager"); + } else { + ERROR_LOG("Failed to get alarm list for alarm id (%s), error code: (%d)", alm_id, rc); + } + Py_RETURN_NONE; +} + +static PyObject * _fm_get_by_eid(PyObject * self, PyObject *args, PyObject* kwargs) { + const char *eid; + 
fm_ent_inst_t inst_id; + std::vector< SFmAlarmDataT > lst; + unsigned int max= DEF_MAX_ALARMS; + char* keywords[] = {"entity_instance_id", "max", (char*)NULL}; + + memset(inst_id, 0 , sizeof(inst_id)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s|i", keywords, &eid, &max)) { + ERROR_LOG("Failed to parse args"); + Py_RETURN_NONE; + } + strncpy(inst_id, eid ,sizeof(inst_id)-1); + + try { + lst.resize(max); + } catch(...) { + ERROR_LOG("Failed to allocate memory"); + Py_RETURN_NONE; + } + unsigned int max_alarms_to_get = max; + EFmErrorT rc = fm_get_faults(&inst_id, &(lst[0]), &max_alarms_to_get); + if (rc == FM_ERR_OK) { + PyObject *__lst = PyList_New(0); + for ( size_t ix = 0; ix < max_alarms_to_get; ++ix ) { + std::string s; + fm_alarm_to_string(&lst[ix], s); + if (s.size() > 0) { + if (PyList_Append(__lst,PyString_FromString(s.c_str())) != 0) { + ERROR_LOG("Failed to append alarm to the list"); + } + } + } + /* python will garbage collect if the reference count is correct + (it should be 1 at this point) */ + return __lst; + } + + if (rc == FM_ERR_ENTITY_NOT_FOUND) { + DEBUG_LOG("No alarm found for entity id (%s)", inst_id); + } else if (rc == FM_ERR_NOCONNECT) { + WARNING_LOG("Failed to connect to FM manager"); + } else { + ERROR_LOG("Failed to get alarm list for entity id (%s), error code: (%d)", inst_id, rc); + } + Py_RETURN_NONE; +} + +static PyObject * _fm_clear(PyObject * self, PyObject *args) { + + const char *filter; + std::string alm_str, filter_str; + AlarmFilter af; + EFmErrorT rc; + + if (!PyArg_ParseTuple(args, "s", &filter)) { + ERROR_LOG("Failed to parse args"); + Py_RETURN_FALSE; + } + + filter_str.assign(filter); + if (!fm_alarm_filter_from_string(filter_str, &af)) { + ERROR_LOG("Invalid alarm filter: (%s)", filter_str.c_str()); + Py_RETURN_FALSE; + } + + rc = fm_clear_fault(&af); + if (rc == FM_ERR_OK) { + Py_RETURN_TRUE; + } + + if (rc == FM_ERR_ENTITY_NOT_FOUND) { + DEBUG_LOG("No alarm found to clear: (%s) (%s)", af.alarm_id, 
af.entity_instance_id); + } else if (rc == FM_ERR_NOCONNECT) { + WARNING_LOG("Failed to connect to FM manager"); + } else { + ERROR_LOG("Failed to clear alarm by filter: (%s) (%s), error code: (%d)", + af.alarm_id, af.entity_instance_id, rc); + } + Py_RETURN_FALSE; +} + +static PyObject * _fm_clear_all(PyObject * self, PyObject *args) { + + fm_ent_inst_t inst_id; + const char *eid; + EFmErrorT rc; + + memset(inst_id, 0 , sizeof(inst_id)); + if (!PyArg_ParseTuple(args,"s", &eid)) { + ERROR_LOG("Failed to parse args"); + Py_RETURN_FALSE; + } + + strncpy(inst_id, eid ,sizeof(inst_id)-1); + rc = fm_clear_all(&inst_id); + if (rc == FM_ERR_OK) { + Py_RETURN_TRUE; + } else { + ERROR_LOG("Failed to clear alarms with entity id (%s), error code: (%d)", + inst_id, rc); + Py_RETURN_FALSE; + } +} + +static PyMethodDef _methods [] = { + { "set", _fm_set, METH_VARARGS, "Set or update an alarm" }, + { "get", _fm_get, METH_VARARGS, "Get alarms by filter" }, + { "clear", _fm_clear, METH_VARARGS, "Clear an alarm by filter" }, + { "clear_all", _fm_clear_all, METH_VARARGS, + "Clear alarms that match the entity instance id"}, + { "get_by_aid", (PyCFunction)_fm_get_by_aid, METH_VARARGS | METH_KEYWORDS, + "Get alarms by alarm id" }, + { "get_by_eid", (PyCFunction)_fm_get_by_eid, METH_VARARGS | METH_KEYWORDS, + "Get alarms by entity instance id" }, + { NULL, NULL, 0, NULL } +}; + +PyMODINIT_FUNC initfm_core() { + PyObject *m = Py_InitModule("fm_core", _methods); + if (m == NULL){ + PySys_WriteStderr("Failed to initialize fm_core"); + return; + } +} diff --git a/fm-common/sources/setup.py b/fm-common/sources/setup.py new file mode 100644 index 00000000..bc19a043 --- /dev/null +++ b/fm-common/sources/setup.py @@ -0,0 +1,11 @@ + +from distutils.core import setup, Extension + +setup(name="fm_core", version="1.0", + ext_modules=[Extension("fm_core", [ + "fm_python_mod_main.cpp"], + libraries=[ + 'pq', + "fmcommon"], + library_dirs=['.'] + )]) diff --git a/fm-mgr/centos/fm-mgr.spec 
b/fm-mgr/centos/fm-mgr.spec index 34171e38..47e77a3b 100644 --- a/fm-mgr/centos/fm-mgr.spec +++ b/fm-mgr/centos/fm-mgr.spec @@ -5,7 +5,7 @@ Summary: CGTS Platform Fault Manager Package Name: fm-mgr Version: 1.0 Release: %{tis_patch_ver}%{?_tis_dist} -License: Apache-2.0 +License: windriver Group: base Packager: Wind River URL: unknown @@ -15,7 +15,7 @@ BuildRequires: systemd-devel BuildRequires: libuuid-devel %description -CGTS platform Fault Manager that serves the client +CGTS platform Fault Manager that serves the client application fault management requests and raise/clear/update alarms in the active alarm database. @@ -43,7 +43,6 @@ rm -rf $RPM_BUILD_ROOT %defattr(-,root,root,-) %doc LICENSE %{local_bindir}/fmManager -%config(noreplace) %{_sysconfdir}/fm.conf %_sysconfdir/init.d/fminit %{_unitdir}/fminit.service %config(noreplace) %{_sysconfdir}/logrotate.d/fm.logrotate diff --git a/fm-mgr/sources/Makefile b/fm-mgr/sources/Makefile index dd96719a..74acf358 100755 --- a/fm-mgr/sources/Makefile +++ b/fm-mgr/sources/Makefile @@ -23,7 +23,6 @@ install_non_bb: install -m 755 -d $(DEST_DIR)/etc/logrotate.d install -m 755 -d $(DEST_DIR)/usr/local install -m 755 -d $(DEST_DIR)/usr/local/bin - install -m 644 fm.conf $(DEST_DIR)/etc/fm.conf install -m 755 fminit $(DEST_DIR)/etc/init.d/fminit install -m 755 fmManager $(DEST_DIR)/usr/local/bin/fmManager install -m 644 fm.logrotate $(DEST_DIR)/etc/logrotate.d/fm.logrotate diff --git a/fm-mgr/sources/fm_main.cpp b/fm-mgr/sources/fm_main.cpp index 760a5395..7e2df7e6 100644 --- a/fm-mgr/sources/fm_main.cpp +++ b/fm-mgr/sources/fm_main.cpp @@ -6,25 +6,20 @@ #include #include - -#include - #include #include #include #include +#include +#include +#include + void sig_handler(int signo) { - int result = 0; if (signo == SIGHUP){ - result = setlogmask(LOG_UPTO (LOG_DEBUG)); - if (result == LOG_UPTO (LOG_DEBUG)){ - result = setlogmask(LOG_UPTO (LOG_INFO)); - syslog(LOG_INFO, "Received SIGHUP, set log level from %d to LOG_INFO", 
result); - }else{ - syslog(LOG_INFO, "Received SIGHUP, set log level from %d to LOG_DEBUG", result); - } - } + fm_get_config_paramters(); + fmLoggingInit(); + } } int main(int argc, char *argv[]) { diff --git a/fm-mgr/sources/fminit b/fm-mgr/sources/fminit index 37b87c91..3b555eb7 100755 --- a/fm-mgr/sources/fminit +++ b/fm-mgr/sources/fminit @@ -20,8 +20,7 @@ FMMGR_NAME="fmManager" FMMGR="/usr/local/bin/${FMMGR_NAME}" PIDFILE=/var/run/${FMMGR_NAME}.pid -CONFIGFILE=/etc/fm.conf - +CONFIGFILE=/etc/fm/fm.conf # Linux Standard Base (LSB) Error Codes RETVAL=0 diff --git a/fm-rest-api/PKG-INFO b/fm-rest-api/PKG-INFO new file mode 100644 index 00000000..7fea993d --- /dev/null +++ b/fm-rest-api/PKG-INFO @@ -0,0 +1,12 @@ +Metadata-Version: 1.1 +Name: fm-rest-api +Version: 1.0 +Summary: Fault Manager REST API +Home-page: +Author: Windriver +Author-email: info@windriver.com +License: windriver + +Description: Fault Manager REST API + +Platform: UNKNOWN diff --git a/fm-rest-api/centos/build_srpm.data b/fm-rest-api/centos/build_srpm.data new file mode 100644 index 00000000..a8b255df --- /dev/null +++ b/fm-rest-api/centos/build_srpm.data @@ -0,0 +1,2 @@ +SRC_DIR="fm" +TIS_PATCH_VER=1 diff --git a/fm-rest-api/centos/fm-rest-api.spec b/fm-rest-api/centos/fm-rest-api.spec new file mode 100644 index 00000000..27011628 --- /dev/null +++ b/fm-rest-api/centos/fm-rest-api.spec @@ -0,0 +1,96 @@ +Summary: Fault Management Openstack REST API +Name: fm-rest-api +Version: 1.0 +Release: %{tis_patch_ver}%{?_tis_dist} +License: windriver +Group: base +Packager: Wind River +URL: unknown +Source0: %{name}-%{version}.tar.gz + + +BuildRequires: python-setuptools +BuildRequires: python-oslo-config +BuildRequires: python-oslo-db +BuildRequires: python-oslo-log +BuildRequires: python-oslo-messaging +BuildRequires: python-oslo-middleware + +Requires: python-eventlet +Requires: python-webob +Requires: python-paste + +BuildRequires: systemd + +%description +Fault Management Openstack REST API Service + 
+%define local_bindir /usr/bin/ +%define local_initddir /etc/rc.d/init.d +%define pythonroot /usr/lib64/python2.7/site-packages +%define local_etc_pmond /etc/pmon.d/ +%define debug_package %{nil} + +%prep +%autosetup -n %{name}-%{version} + +# Remove bundled egg-info +rm -rf *.egg-info + +%build +echo "Start build" + +export PBR_VERSION=%{version} +%{__python} setup.py build +PYTHONPATH=. oslo-config-generator --config-file=fm/config-generator.conf + +%install +echo "Start install" +export PBR_VERSION=%{version} +%{__python} setup.py install --root=%{buildroot} \ + --install-lib=%{pythonroot} \ + --prefix=/usr \ + --install-data=/usr/share \ + --single-version-externally-managed + +install -p -D -m 644 scripts/fm-api.service %{buildroot}%{_unitdir}/fm-api.service +install -d -m 755 %{buildroot}%{local_initddir} +install -p -D -m 755 scripts/fm-api %{buildroot}%{local_initddir}/fm-api + +install -d -m 755 %{buildroot}%{local_etc_pmond} +install -p -D -m 644 fm-api-pmond.conf %{buildroot}%{local_etc_pmond}/fm-api.conf + +# Install sql migration stuff that wasn't installed by setup.py +install -m 640 fm/db/sqlalchemy/migrate_repo/migrate.cfg %{buildroot}%{pythonroot}/fm/db/sqlalchemy/migrate_repo/migrate.cfg + +# install default config files +cd %{_builddir}/%{name}-%{version} && oslo-config-generator --config-file fm/config-generator.conf --output-file %{_builddir}/%{name}-%{version}/fm.conf.sample +install -p -D -m 644 %{_builddir}/%{name}-%{version}/fm.conf.sample %{buildroot}%{_sysconfdir}/fm/fm.conf + +%clean +echo "CLEAN CALLED" +rm -rf $RPM_BUILD_ROOT + +%post +/bin/systemctl enable fm-api.service >/dev/null 2>&1 + +%files +%defattr(-,root,root,-) +%doc LICENSE + +%{local_bindir}/* + +%{local_initddir}/* + +%{pythonroot}/fm/* + +%{pythonroot}/fm-%{version}*.egg-info + +%config(noreplace) %{_sysconfdir}/fm/fm.conf + +# systemctl service files +%{_unitdir}/fm-api.service + +# pmond config file +%{local_etc_pmond}/fm-api.conf + diff --git a/fm-rest-api/fm/LICENSE 
b/fm-rest-api/fm/LICENSE new file mode 100755 index 00000000..68c771a0 --- /dev/null +++ b/fm-rest-api/fm/LICENSE @@ -0,0 +1,176 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ diff --git a/fm-rest-api/fm/fm-api-pmond.conf b/fm-rest-api/fm/fm-api-pmond.conf new file mode 100644 index 00000000..017f4cbb --- /dev/null +++ b/fm-rest-api/fm/fm-api-pmond.conf @@ -0,0 +1,10 @@ +[process] +process = fm-api +pidfile = /var/run/fm-api.pid +script = /etc/init.d/fm-api +style = lsb ; ocf or lsb +severity = major ; minor, major, critical +restarts = 3 ; restarts before error assertion +interval = 5 ; number of seconds to wait between restarts +debounce = 20 ; number of seconds to wait before degrade clear + diff --git a/fm-rest-api/fm/fm/__init__.py b/fm-rest-api/fm/fm/__init__.py new file mode 100644 index 00000000..b98b5055 --- /dev/null +++ b/fm-rest-api/fm/fm/__init__.py @@ -0,0 +1,5 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/fm-rest-api/fm/fm/api/__init__.py b/fm-rest-api/fm/fm/api/__init__.py new file mode 100644 index 00000000..b98b5055 --- /dev/null +++ b/fm-rest-api/fm/fm/api/__init__.py @@ -0,0 +1,5 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/fm-rest-api/fm/fm/api/app.py b/fm-rest-api/fm/fm/api/app.py new file mode 100644 index 00000000..4e3fe99d --- /dev/null +++ b/fm-rest-api/fm/fm/api/app.py @@ -0,0 +1,85 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + + +from oslo_service import service +from oslo_service import wsgi +from oslo_config import cfg +from oslo_log import log +import pecan + +from fm.api import config +from fm.common import policy +from fm.common.i18n import _ + +CONF = cfg.CONF + +LOG = log.getLogger(__name__) + +_launcher = None + + +def get_pecan_config(): + # Set up the pecan configuration + filename = config.__file__.replace('.pyc', '.py') + return pecan.configuration.conf_from_file(filename) + + +def setup_app(config=None): + policy.init() + + if not config: + config = get_pecan_config() + + pecan.configuration.set_config(dict(config), overwrite=True) + app_conf = dict(config.app) + + app = pecan.make_app( + app_conf.pop('root'), + debug=CONF.debug, + logging=getattr(config, 'logging', {}), + force_canonical=getattr(config.app, 'force_canonical', True), + guess_content_type_from_ext=False, + **app_conf + ) + return app + + +def load_paste_app(app_name=None): + """Loads a WSGI app from a paste config file.""" + if app_name is None: + app_name = cfg.CONF.prog + + loader = wsgi.Loader(cfg.CONF) + app = loader.load_app(app_name) + return app + + +def app_factory(global_config, **local_conf): + return setup_app() + + +def serve(api_service, conf, workers=1): + global _launcher + if _launcher: + raise RuntimeError(_('serve() can only be called once')) + + _launcher = service.launch(conf, api_service, workers=workers) + + +def wait(): + _launcher.wait() diff --git a/fm-rest-api/fm/fm/api/config.py b/fm-rest-api/fm/fm/api/config.py new file mode 100644 index 00000000..041a556f --- /dev/null +++ b/fm-rest-api/fm/fm/api/config.py @@ -0,0 +1,67 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + + +import sys +import pbr.version +from oslo_config import cfg +from oslo_log import log as logging +from keystoneauth1 import loading as ks_loading +from fm.api import hooks + +LOG = logging.getLogger(__name__) + +sysinv_group = cfg.OptGroup( + 'sysinv', + title='Sysinv Options', + help="Configuration options for the platform service") + +sysinv_opts = [ + cfg.StrOpt('catalog_info', + default='platform:sysinv:internalURL', + help="Service catalog Look up info."), + cfg.StrOpt('os_region_name', + default='RegionOne', + help="Region name of this node. It is used for catalog lookup"), +] + +version_info = pbr.version.VersionInfo('fm') + +# Pecan Application Configurations +app = { + 'root': 'fm.api.controllers.root.RootController', + 'modules': ['fm.api'], + 'hooks': [ + hooks.ContextHook(), + hooks.DBHook(), + ], + 'acl_public_routes': [ + '/', + '/v1', + ], +} + + +def init(args, **kwargs): + cfg.CONF.register_group(sysinv_group) + cfg.CONF.register_opts(sysinv_opts, group=sysinv_group) + ks_loading.register_session_conf_options(cfg.CONF, + sysinv_group.name) + logging.register_options(cfg.CONF) + + cfg.CONF(args=args, project='fm', + version='%%(prog)s %s' % version_info.release_string(), + **kwargs) + + +def setup_logging(): + """Sets up the logging options for a log with supplied name.""" + logging.setup(cfg.CONF, "fm") + LOG.debug("Logging enabled!") + LOG.debug("%(prog)s version %(version)s", + {'prog': sys.argv[0], + 'version': version_info.release_string()}) + LOG.debug("command line: %s", " ".join(sys.argv)) diff --git a/fm-rest-api/fm/fm/api/controllers/__init__.py b/fm-rest-api/fm/fm/api/controllers/__init__.py new file mode 100644 index 00000000..b98b5055 --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/__init__.py @@ -0,0 +1,5 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/fm-rest-api/fm/fm/api/controllers/root.py b/fm-rest-api/fm/fm/api/controllers/root.py new file mode 100644 index 00000000..e6c24d2c --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/root.py @@ -0,0 +1,110 @@ +# -*- encoding: utf-8 -*- +# +# Copyright © 2012 New Dream Network, LLC (DreamHost) +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import pecan +from pecan import rest +from wsme import types as wtypes +import wsmeext.pecan as wsme_pecan + +from fm.api.controllers import v1 +from fm.api.controllers.v1 import base +from fm.api.controllers.v1 import link + +ID_VERSION = 'v1' + + +def expose(*args, **kwargs): + """Ensure that only JSON, and not XML, is supported.""" + if 'rest_content_types' not in kwargs: + kwargs['rest_content_types'] = ('json',) + return wsme_pecan.wsexpose(*args, **kwargs) + + +class Version(base.APIBase): + """An API version representation. + + This class represents an API version, including the minimum and + maximum minor versions that are supported within the major version. 
+ """ + + id = wtypes.text + """The ID of the (major) version, also acts as the release number""" + + links = [link.Link] + """A Link that point to a specific version of the API""" + + @classmethod + def convert(cls, vid): + version = Version() + version.id = vid + version.links = [link.Link.make_link('self', pecan.request.host_url, + vid, '', bookmark=True)] + return version + + +class Root(base.APIBase): + + name = wtypes.text + """The name of the API""" + + description = wtypes.text + """Some information about this API""" + + versions = [Version] + """Links to all the versions available in this API""" + + default_version = Version + """A link to the default version of the API""" + + @staticmethod + def convert(): + root = Root() + root.name = "Fault Management API" + root.description = ("Fault Management is an OpenStack project which " + "provides REST API services for alarms and logs.") + root.default_version = Version.convert(ID_VERSION) + root.versions = [root.default_version] + return root + + +class RootController(rest.RestController): + + _versions = [ID_VERSION] + """All supported API versions""" + + _default_version = ID_VERSION + """The default API version""" + + v1 = v1.Controller() + + @expose(Root) + def get(self): + # NOTE: The reason why convert() it's being called for every + # request is because we need to get the host url from + # the request object to make the links. + return Root.convert() + + @pecan.expose() + def _route(self, args): + """Overrides the default routing behavior. + + It redirects the request to the default version of the FM API + if the version number is not specified in the url. 
+ """ + + if args[0] and args[0] not in self._versions: + args = [self._default_version] + args + return super(RootController, self)._route(args) diff --git a/fm-rest-api/fm/fm/api/controllers/v1/__init__.py b/fm-rest-api/fm/fm/api/controllers/v1/__init__.py new file mode 100644 index 00000000..0492c24e --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/v1/__init__.py @@ -0,0 +1,107 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +import pecan +import wsmeext.pecan as wsme_pecan +from pecan import rest +from wsme import types as wtypes + +from fm.api.controllers.v1 import link +from fm.api.controllers.v1 import alarm +from fm.api.controllers.v1 import base +from fm.api.controllers.v1 import event_log +from fm.api.controllers.v1 import event_suppression + + +class MediaType(base.APIBase): + """A media type representation.""" + + base = wtypes.text + type = wtypes.text + + def __init__(self, base, type): + self.base = base + self.type = type + + +class V1(base.APIBase): + """The representation of the version 1 of the API.""" + + id = wtypes.text + "The ID of the version, also acts as the release number" + + media_types = [MediaType] + "An array of supported media types for this version" + + links = [link.Link] + "Links that point to a specific URL for this version and documentation" + + alarms = [link.Link] + "Links to the alarm resource" + + event_log = [link.Link] + "Links to the event_log resource" + + event_suppression = [link.Link] + "Links to the event_suppression resource" + + @classmethod + def convert(self): + v1 = V1() + v1.id = "v1" + v1.links = [link.Link.make_link('self', pecan.request.host_url, + 'v1', '', bookmark=True), + link.Link.make_link('describedby', + 'http://www.windriver.com', + 'developer/fm/dev', + 'api-spec-v1.html', + bookmark=True, type='text/html') + ] + v1.media_types = [MediaType('application/json', + 'application/vnd.openstack.fm.v1+json')] + + v1.alarms = 
[link.Link.make_link('self', pecan.request.host_url, + 'alarms', ''), + link.Link.make_link('bookmark', + pecan.request.host_url, + 'alarms', '', + bookmark=True) + ] + + v1.event_log = [link.Link.make_link('self', pecan.request.host_url, + 'event_log', ''), + link.Link.make_link('bookmark', + pecan.request.host_url, + 'event_log', '', + bookmark=True) + ] + + v1.event_suppression = [link.Link.make_link('self', + pecan.request.host_url, + 'event_suppression', ''), + link.Link.make_link('bookmark', + pecan.request.host_url, + 'event_suppression', '', + bookmark=True) + ] + + return v1 + + +class Controller(rest.RestController): + """Version 1 API controller root.""" + + alarms = alarm.AlarmController() + event_log = event_log.EventLogController() + event_suppression = event_suppression.EventSuppressionController() + + @wsme_pecan.wsexpose(V1) + def get(self): + return V1.convert() + + +__all__ = ('Controller',) diff --git a/fm-rest-api/fm/fm/api/controllers/v1/alarm.py b/fm-rest-api/fm/fm/api/controllers/v1/alarm.py new file mode 100755 index 00000000..72f47c32 --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/v1/alarm.py @@ -0,0 +1,341 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + + +import datetime +import pecan +from pecan import rest + +import wsme +from wsme import types as wtypes +import wsmeext.pecan as wsme_pecan +from oslo_log import log + +from fm_api import fm_api + +from fm.api.controllers.v1 import base +from fm.api.controllers.v1 import collection +from fm.api.controllers.v1 import link +from fm.api.controllers.v1 import types +from fm.api.controllers.v1 import utils as api_utils +from fm.common import exceptions +from fm.common import constants +from fm import objects +from fm.api.controllers.v1.query import Query +from fm.api.controllers.v1.sysinv import cgtsclient + +from fm_api import constants as fm_constants + + +LOG = log.getLogger(__name__) + + +class AlarmPatchType(types.JsonPatchType): + pass + + +class Alarm(base.APIBase): + """API representation of an alarm. + + This class enforces type checking and value constraints, and converts + between the internal object model and the API representation of + an alarm. 
+ """ + + uuid = types.uuid + "The UUID of the alarm" + + alarm_id = wsme.wsattr(wtypes.text, mandatory=True) + "structured id for the alarm; AREA_ID ID; 300-001" + + alarm_state = wsme.wsattr(wtypes.text, mandatory=True) + "The state of the alarm" + + entity_type_id = wtypes.text + "The type of the object raising alarm" + + entity_instance_id = wsme.wsattr(wtypes.text, mandatory=True) + "The original instance information of the object raising alarm" + + timestamp = datetime.datetime + "The time in UTC at which the alarm state is last updated" + + severity = wsme.wsattr(wtypes.text, mandatory=True) + "The severity of the alarm" + + reason_text = wtypes.text + "The reason why the alarm is raised" + + alarm_type = wsme.wsattr(wtypes.text, mandatory=True) + "The type of the alarm" + + probable_cause = wsme.wsattr(wtypes.text, mandatory=True) + "The probable cause of the alarm" + + proposed_repair_action = wtypes.text + "The action to clear the alarm" + + service_affecting = wtypes.text + "Whether the alarm affects the service" + + suppression = wtypes.text + "'allowed' or 'not-allowed'" + + suppression_status = wtypes.text + "'suppressed' or 'unsuppressed'" + + mgmt_affecting = wtypes.text + "Whether the alarm prevents software management actions" + + degrade_affecting = wtypes.text + "Wheter the alarm prevents filesystem resize actions" + + links = [link.Link] + "A list containing a self link and associated alarm links" + + def __init__(self, **kwargs): + self.fields = objects.alarm.fields.keys() + for k in self.fields: + setattr(self, k, kwargs.get(k)) + + @classmethod + def convert_with_links(cls, rpc_ialarm, expand=True): + if isinstance(rpc_ialarm, tuple): + alarms = rpc_ialarm[0] + suppress_status = rpc_ialarm[constants.DB_SUPPRESS_STATUS] + mgmt_affecting = rpc_ialarm[constants.DB_MGMT_AFFECTING] + degrade_affecting = rpc_ialarm[constants.DB_DEGRADE_AFFECTING] + else: + alarms = rpc_ialarm + suppress_status = rpc_ialarm.suppression_status + mgmt_affecting = 
rpc_ialarm.mgmt_affecting + degrade_affecting = rpc_ialarm.degrade_affecting + + alarms['service_affecting'] = str(alarms['service_affecting']) + alarms['suppression'] = str(alarms['suppression']) + + alm = Alarm(**alarms.as_dict()) + if not expand: + alm.unset_fields_except(['uuid', 'alarm_id', 'entity_instance_id', + 'severity', 'timestamp', 'reason_text', + 'mgmt_affecting ', 'degrade_affecting']) + + alm.entity_instance_id = \ + api_utils.make_display_id(alm.entity_instance_id, replace=False) + + alm.suppression_status = str(suppress_status) + + alm.mgmt_affecting = str( + not fm_api.FaultAPIs.alarm_allowed(alm.severity, mgmt_affecting)) + + alm.degrade_affecting = str( + not fm_api.FaultAPIs.alarm_allowed(alm.severity, degrade_affecting)) + + return alm + + +class AlarmCollection(collection.Collection): + """API representation of a collection of alarm.""" + + alarms = [Alarm] + "A list containing alarm objects" + + def __init__(self, **kwargs): + self._type = 'alarms' + + @classmethod + def convert_with_links(cls, ialm, limit, url=None, + expand=False, **kwargs): + # filter masked alarms + ialms = [] + for a in ialm: + if isinstance(a, tuple): + ialm_instance = a[0] + else: + ialm_instance = a + if str(ialm_instance['masked']) != 'True': + ialms.append(a) + + collection = AlarmCollection() + collection.alarms = [Alarm.convert_with_links(ch, expand) + for ch in ialms] + # url = url or None + collection.next = collection.get_next(limit, url=url, **kwargs) + return collection + + +LOCK_NAME = 'AlarmController' + + +class AlarmSummary(base.APIBase): + """API representation of an alarm summary object.""" + + critical = wsme.wsattr(int, mandatory=True) + "The count of critical alarms" + + major = wsme.wsattr(int, mandatory=True) + "The count of major alarms" + + minor = wsme.wsattr(int, mandatory=True) + "The count of minor alarms" + + warnings = wsme.wsattr(int, mandatory=True) + "The count of warnings" + + status = wsme.wsattr(wtypes.text, mandatory=True) + "The 
status of the system" + + system_uuid = wsme.wsattr(types.uuid, mandatory=True) + "The UUID of the system (for distributed cloud use)" + + @classmethod + def convert_with_links(cls, ialm_sum, uuid): + summary = AlarmSummary() + summary.critical = ialm_sum[fm_constants.FM_ALARM_SEVERITY_CRITICAL] + summary.major = ialm_sum[fm_constants.FM_ALARM_SEVERITY_MAJOR] + summary.minor = ialm_sum[fm_constants.FM_ALARM_SEVERITY_MINOR] + summary.warnings = ialm_sum[fm_constants.FM_ALARM_SEVERITY_WARNING] + summary.status = ialm_sum['status'] + summary.system_uuid = uuid + return summary + + +class AlarmController(rest.RestController): + """REST controller for alarm.""" + + _custom_actions = { + 'detail': ['GET'], + 'summary': ['GET'], + } + + def _get_alarm_summary(self, include_suppress): + kwargs = {} + kwargs["include_suppress"] = include_suppress + ialm = pecan.request.dbapi.alarm_get_all(**kwargs) + ialm_counts = {fm_constants.FM_ALARM_SEVERITY_CRITICAL: 0, + fm_constants.FM_ALARM_SEVERITY_MAJOR: 0, + fm_constants.FM_ALARM_SEVERITY_MINOR: 0, + fm_constants.FM_ALARM_SEVERITY_WARNING: 0} + # filter masked alarms and sum by severity + for a in ialm: + ialm_instance = a[0] + if str(ialm_instance['masked']) != 'True': + if ialm_instance['severity'] in ialm_counts: + ialm_counts[ialm_instance['severity']] += 1 + + # Generate the status + status = fm_constants.FM_ALARM_OK_STATUS + if (ialm_counts[fm_constants.FM_ALARM_SEVERITY_MAJOR] > 0) or \ + (ialm_counts[fm_constants.FM_ALARM_SEVERITY_MINOR] > 0): + status = fm_constants.FM_ALARM_DEGRADED_STATUS + if ialm_counts[fm_constants.FM_ALARM_SEVERITY_CRITICAL] > 0: + status = fm_constants.FM_ALARM_CRITICAL_STATUS + ialm_counts['status'] = status + + system = cgtsclient(pecan.request.context).isystem.list()[0] + uuid = system.uuid + + return AlarmSummary.convert_with_links(ialm_counts, uuid) + + def _get_alarm_collection(self, marker, limit, sort_key, sort_dir, + expand=False, resource_url=None, + q=None, include_suppress=False): + 
limit = api_utils.validate_limit(limit) + sort_dir = api_utils.validate_sort_dir(sort_dir) + if isinstance(sort_key, str) and ',' in sort_key: + sort_key = sort_key.split(',') + + kwargs = {} + if q is not None: + for i in q: + if i.op == 'eq': + kwargs[i.field] = i.value + + kwargs["include_suppress"] = include_suppress + + if marker: + + marker_obj = objects.alarm.get_by_uuid(pecan.request.context, + marker) + ialm = pecan.request.dbapi.alarm_get_list( + limit, marker_obj, + sort_key=sort_key, + sort_dir=sort_dir, + include_suppress=include_suppress) + else: + kwargs['limit'] = limit + ialm = pecan.request.dbapi.alarm_get_all(**kwargs) + + return AlarmCollection.convert_with_links(ialm, limit, + url=resource_url, + expand=expand, + sort_key=sort_key, + sort_dir=sort_dir) + + @wsme_pecan.wsexpose(AlarmCollection, [Query], + types.uuid, int, wtypes.text, wtypes.text, bool) + def get_all(self, q=[], marker=None, limit=None, sort_key='id', + sort_dir='asc', include_suppress=False): + """Retrieve a list of alarm. + + :param marker: pagination marker for large data sets. + :param limit: maximum number of resources to return in a single result. + :param sort_key: column to sort results by. Default: id. + :param sort_dir: direction to sort. "asc" or "desc". Default: asc. + :param include_suppress: filter on suppressed alarms. Default: False + """ + return self._get_alarm_collection(marker, limit, sort_key, + sort_dir, q=q, + include_suppress=include_suppress) + + @wsme_pecan.wsexpose(AlarmCollection, types.uuid, int, + wtypes.text, wtypes.text) + def detail(self, marker=None, limit=None, sort_key='id', sort_dir='asc'): + """Retrieve a list of alarm with detail. + + :param marker: pagination marker for large data sets. + :param limit: maximum number of resources to return in a single result. + :param sort_key: column to sort results by. Default: id. + :param sort_dir: direction to sort. "asc" or "desc". Default: asc. 
+ """ + # /detail should only work agaist collections + parent = pecan.request.path.split('/')[:-1][-1] + if parent != "alarm": + raise exceptions.HTTPNotFound + + expand = True + resource_url = '/'.join(['alarm', 'detail']) + return self._get_alarm_collection(marker, limit, sort_key, sort_dir, + expand, resource_url) + + @wsme_pecan.wsexpose(Alarm, wtypes.text) + def get_one(self, id): + """Retrieve information about the given alarm. + + :param id: UUID of an alarm. + """ + rpc_ialarm = objects.alarm.get_by_uuid( + pecan.request.context, id) + if str(rpc_ialarm['masked']) == 'True': + raise exceptions.HTTPNotFound + + return Alarm.convert_with_links(rpc_ialarm) + + @wsme_pecan.wsexpose(None, wtypes.text, status_code=204) + def delete(self, id): + """Delete an alarm. + + :param id: uuid of an alarm. + """ + pecan.request.dbapi.alarm_destroy(id) + + @wsme_pecan.wsexpose(AlarmSummary, bool) + def summary(self, include_suppress=False): + """Retrieve a summery of alarms. + + :param include_suppress: filter on suppressed alarms. Default: False + """ + return self._get_alarm_summary(include_suppress) diff --git a/fm-rest-api/fm/fm/api/controllers/v1/base.py b/fm-rest-api/fm/fm/api/controllers/v1/base.py new file mode 100644 index 00000000..9755b70b --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/v1/base.py @@ -0,0 +1,131 @@ +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + +import datetime +import functools + +from webob import exc +import wsme +from wsme import types as wtypes +from oslo_utils._i18n import _ + + +class APIBase(wtypes.Base): + + created_at = wsme.wsattr(datetime.datetime, readonly=True) + """The time in UTC at which the object is created""" + + updated_at = wsme.wsattr(datetime.datetime, readonly=True) + """The time in UTC at which the object is updated""" + + def as_dict(self): + """Render this object as a dict of its fields.""" + return dict((k, getattr(self, k)) + for k in self.fields + if hasattr(self, k) and + getattr(self, k) != wsme.Unset) + + def unset_fields_except(self, except_list=None): + """Unset fields so they don't appear in the message body. + + :param except_list: A list of fields that won't be touched. + + """ + if except_list is None: + except_list = [] + + for k in self.as_dict(): + if k not in except_list: + setattr(self, k, wsme.Unset) + + @classmethod + def from_rpc_object(cls, m, fields=None): + """Convert a RPC object to an API object.""" + obj_dict = m.as_dict() + # Unset non-required fields so they do not appear + # in the message body + obj_dict.update(dict((k, wsme.Unset) + for k in obj_dict.keys() + if fields and k not in fields)) + return cls(**obj_dict) + + +@functools.total_ordering +class Version(object): + """API Version object.""" + + string = 'X-OpenStack-FM-API-Version' + """HTTP Header string carrying the requested version""" + + min_string = 'X-OpenStack-FM-API-Minimum-Version' + """HTTP response header""" + + max_string = 'X-OpenStack-FM-API-Maximum-Version' + """HTTP response header""" + + def __init__(self, headers, default_version, latest_version): + """Create an API Version object from the supplied headers. 
+ + :param headers: webob headers + :param default_version: version to use if not specified in headers + :param latest_version: version to use if latest is requested + :raises: webob.HTTPNotAcceptable + """ + (self.major, self.minor) = Version.parse_headers( + headers, default_version, latest_version) + + def __repr__(self): + return '%s.%s' % (self.major, self.minor) + + @staticmethod + def parse_headers(headers, default_version, latest_version): + """Determine the API version requested based on the headers supplied. + + :param headers: webob headers + :param default_version: version to use if not specified in headers + :param latest_version: version to use if latest is requested + :returns: a tupe of (major, minor) version numbers + :raises: webob.HTTPNotAcceptable + """ + version_str = headers.get(Version.string, default_version) + + if version_str.lower() == 'latest': + parse_str = latest_version + else: + parse_str = version_str + + try: + version = tuple(int(i) for i in parse_str.split('.')) + except ValueError: + version = () + + if len(version) != 2: + raise exc.HTTPNotAcceptable(_( + "Invalid value for %s header") % Version.string) + return version + + def __gt__(self, other): + return (self.major, self.minor) > (other.major, other.minor) + + def __eq__(self, other): + return (self.major, self.minor) == (other.major, other.minor) + + def __ne__(self, other): + return not self.__eq__(other) diff --git a/fm-rest-api/fm/fm/api/controllers/v1/collection.py b/fm-rest-api/fm/fm/api/controllers/v1/collection.py new file mode 100644 index 00000000..51b794e4 --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/v1/collection.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2013 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +import pecan +from wsme import types as wtypes + +from fm.api.controllers.v1 import base +from fm.api.controllers.v1 import link + + +class Collection(base.APIBase): + + next = wtypes.text + "A link to retrieve the next subset of the collection" + + @property + def collection(self): + return getattr(self, self._type) + + def has_next(self, limit): + """Return whether collection has more items.""" + return len(self.collection) and len(self.collection) == limit + + def get_next(self, limit, url=None, **kwargs): + """Return a link to the next subset of the collection.""" + if not self.has_next(limit): + return wtypes.Unset + + resource_url = url or self._type + q_args = ''.join(['%s=%s&' % (key, kwargs[key]) for key in kwargs]) + next_args = '?%(args)slimit=%(limit)d&marker=%(marker)s' % { + 'args': q_args, 'limit': limit, + 'marker': self.collection[-1].uuid} + + return link.Link.make_link('next', pecan.request.host_url, + resource_url, next_args).href diff --git a/fm-rest-api/fm/fm/api/controllers/v1/event_log.py b/fm-rest-api/fm/fm/api/controllers/v1/event_log.py new file mode 100644 index 00000000..9f4a804f --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/v1/event_log.py @@ -0,0 +1,292 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + +import json +import datetime +from oslo_utils import timeutils +from oslo_log import log + +import pecan +from pecan import rest + +import wsme +from wsme import types as wtypes +import wsmeext.pecan as wsme_pecan + +from fm import objects +from fm.api.controllers.v1 import utils +from fm.api.controllers.v1 import base +from fm.api.controllers.v1 import collection +from fm.api.controllers.v1 import link +from fm.api.controllers.v1.query import Query +from fm.api.controllers.v1 import types +from fm.common import exceptions +from fm.common.i18n import _ + +LOG = log.getLogger(__name__) + + +def prettyDict(dict): + output = json.dumps(dict, sort_keys=True, indent=4) + return output + + +class EventLogPatchType(types.JsonPatchType): + pass + + +class EventLog(base.APIBase): + """API representation of an event log. + + This class enforces type checking and value constraints, and converts + between the internal object model and the API representation of + a event_log. 
+ """ + + uuid = types.uuid + "The UUID of the event_log" + + event_log_id = wsme.wsattr(wtypes.text, mandatory=True) + "structured id for the event log; AREA_ID ID; 300-001" + + state = wsme.wsattr(wtypes.text, mandatory=True) + "The state of the event" + + entity_type_id = wtypes.text + "The type of the object event log" + + entity_instance_id = wsme.wsattr(wtypes.text, mandatory=True) + "The original instance information of the object creating event log" + + timestamp = datetime.datetime + "The time in UTC at which the event log is generated" + + severity = wsme.wsattr(wtypes.text, mandatory=True) + "The severity of the log" + + reason_text = wtypes.text + "The reason why the log is generated" + + event_log_type = wsme.wsattr(wtypes.text, mandatory=True) + "The type of the event log" + + probable_cause = wsme.wsattr(wtypes.text, mandatory=True) + "The probable cause of the event log" + + proposed_repair_action = wtypes.text + "The action to clear the alarm" + + service_affecting = wtypes.text + "Whether the log affects the service" + + suppression = wtypes.text + "'allowed' or 'not-allowed'" + + suppression_status = wtypes.text + "'suppressed' or 'unsuppressed'" + + links = [link.Link] + "A list containing a self link and associated event links" + + def __init__(self, **kwargs): + + self.fields = objects.event_log.fields.keys() + for k in self.fields: + setattr(self, k, kwargs.get(k)) + + @classmethod + def convert_with_links(cls, rpc_event_log, expand=True): + + if isinstance(rpc_event_log, tuple): + ievent_log = rpc_event_log[0] + suppress_status = rpc_event_log[1] + else: + ievent_log = rpc_event_log + suppress_status = rpc_event_log.suppression_status + + ievent_log['service_affecting'] = str(ievent_log['service_affecting']) + ievent_log['suppression'] = str(ievent_log['suppression']) + + ilog = EventLog(**ievent_log.as_dict()) + if not expand: + ilog.unset_fields_except(['uuid', 'event_log_id', 'entity_instance_id', + 'severity', 'timestamp', 'reason_text', 
'state']) + + ilog.entity_instance_id = \ + utils.make_display_id(ilog.entity_instance_id, replace=False) + + ilog.suppression_status = str(suppress_status) + + return ilog + + +def _getEventType(alarms=False, logs=False): + if not alarms and not logs: + return "ALL" + if alarms and logs: + return "ALL" + if logs: + return "LOG" + if alarms: + return "ALARM" + return "ALL" + + +class EventLogCollection(collection.Collection): + """API representation of a collection of event_log.""" + + event_log = [EventLog] + "A list containing event_log objects" + + def __init__(self, **kwargs): + self._type = 'event_log' + + @classmethod + def convert_with_links(cls, ilog, limit=None, url=None, + expand=False, **kwargs): + + ilogs = [] + for a in ilog: + ilogs.append(a) + + collection = EventLogCollection() + collection.event_log = [EventLog.convert_with_links(ch, expand) + for ch in ilogs] + + collection.next = collection.get_next(limit, url=url, **kwargs) + return collection + + +def _handle_bad_input_date(f): + """ + A decorator that executes function f and returns + a more human readable error message on a SQL date exception + """ + def date_handler_wrapper(*args, **kwargs): + try: + return f(*args, **kwargs) + except Exception as e: + import re + e_str = "{}".format(e) + for r in [".*date/time field value out of range: \"(.*)\".*LINE", + ".*invalid input syntax for type timestamp: \"(.*)\".*", + ".*timestamp out of range: \"(.*)\".*"]: + p = re.compile(r, re.DOTALL) + m = p.match(e_str) + if m and len(m.groups()) > 0: + bad_date = m.group(1) + raise wsme.exc.ClientSideError(_( + "Invalid date '{}' specified".format(bad_date))) + raise + return date_handler_wrapper + + +class EventLogController(rest.RestController): + """REST controller for eventlog.""" + + _custom_actions = { + 'detail': ['GET'], + } + + @_handle_bad_input_date + def _get_eventlog_collection(self, marker, limit, sort_key, sort_dir, + expand=False, resource_url=None, + q=None, alarms=False, logs=False, + 
include_suppress=False): + + if limit and limit < 0: + raise wsme.exc.ClientSideError(_("Limit must be positive")) + sort_dir = utils.validate_sort_dir(sort_dir) + kwargs = {} + if q is not None: + for i in q: + if i.op == 'eq': + if i.field == 'start' or i.field == 'end': + val = timeutils.normalize_time( + timeutils.parse_isotime(i.value) + .replace(tzinfo=None)) + i.value = val.isoformat() + kwargs[i.field] = i.value + + evtType = _getEventType(alarms, logs) + kwargs["evtType"] = evtType + kwargs["include_suppress"] = include_suppress + + if marker: + marker_obj = objects.event_log.get_by_uuid(pecan.request.context, + marker) + + ilog = pecan.request.dbapi.event_log_get_list( + limit, marker_obj, + sort_key=sort_key, + sort_dir=sort_dir, + evtType=evtType, + include_suppress=include_suppress) + else: + kwargs['limit'] = limit + ilog = pecan.request.dbapi.event_log_get_all(**kwargs) + + return EventLogCollection.convert_with_links(ilog, limit, + url=resource_url, + expand=expand, + sort_key=sort_key, + sort_dir=sort_dir) + + @wsme_pecan.wsexpose(EventLogCollection, [Query], + types.uuid, int, wtypes.text, wtypes.text, + bool, bool, bool) + def get_all(self, q=[], marker=None, limit=None, sort_key='timestamp', + sort_dir='desc', alarms=False, logs=False, + include_suppress=False): + """Retrieve a list of event_log. + + :param marker: pagination marker for large data sets. + :param limit: maximum number of resources to return in a single result. + :param sort_key: column to sort results by. Default: id. + :param sort_dir: direction to sort. "asc" or "desc". Default: asc. + :param alarms: filter on alarms. Default: False + :param logs: filter on logs. Default: False + :param include_suppress: filter on suppressed alarms. 
Default: False + """ + return self._get_eventlog_collection(marker, limit, sort_key, + sort_dir, q=q, alarms=alarms, + logs=logs, + include_suppress=include_suppress) + + @wsme_pecan.wsexpose(EventLogCollection, types.uuid, int, + wtypes.text, wtypes.text, bool, bool) + def detail(self, marker=None, limit=None, sort_key='id', sort_dir='asc', + alarms=False, logs=False): + """Retrieve a list of event_log with detail. + + :param marker: pagination marker for large data sets. + :param limit: maximum number of resources to return in a single result. + :param sort_key: column to sort results by. Default: id. + :param sort_dir: direction to sort. "asc" or "desc". Default: asc. + :param alarms: filter on alarms. Default: False + :param logs: filter on logs. Default: False + """ + # /detail should only work against collections + parent = pecan.request.path.split('/')[:-1][-1] + if parent != "event_log": + raise exceptions.HTTPNotFound + + expand = True + resource_url = '/'.join(['event_log', 'detail']) + return self._get_eventlog_collection(marker, limit, sort_key, sort_dir, + expand, resource_url, None, + alarms, logs) + + @wsme_pecan.wsexpose(EventLog, wtypes.text) + def get_one(self, id): + """Retrieve information about the given event_log. + + :param id: UUID of an event_log. + """ + rpc_ilog = objects.event_log.get_by_uuid( + pecan.request.context, id) + + return EventLog.convert_with_links(rpc_ilog) diff --git a/fm-rest-api/fm/fm/api/controllers/v1/event_suppression.py b/fm-rest-api/fm/fm/api/controllers/v1/event_suppression.py new file mode 100644 index 00000000..3670dedb --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/v1/event_suppression.py @@ -0,0 +1,215 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + + +import pecan +from pecan import rest + +import wsme +from wsme import types as wtypes +import wsmeext.pecan as wsme_pecan +from oslo_log import log + +from fm import objects +from fm.api.controllers.v1 import base +from fm.api.controllers.v1 import collection +from fm.api.controllers.v1 import link +from fm.api.controllers.v1.query import Query +from fm.api.controllers.v1 import types +from fm.api.controllers.v1 import utils as api_utils +from fm.common import constants +from fm.common import utils as cutils +from fm.common.i18n import _ + +LOG = log.getLogger(__name__) + + +class EventSuppressionPatchType(types.JsonPatchType): + @staticmethod + def mandatory_attrs(): + return ['/uuid'] + + +class EventSuppression(base.APIBase): + """API representation of an event suppression. + + This class enforces type checking and value constraints, and converts + between the internal object model and the API representation of + an event_suppression. 
+ """ + + id = int + "Unique ID for this entry" + + uuid = types.uuid + "Unique UUID for this entry" + + alarm_id = wsme.wsattr(wtypes.text, mandatory=True) + "Unique id for the Alarm Type" + + description = wsme.wsattr(wtypes.text, mandatory=True) + "Text description of the Alarm Type" + + suppression_status = wsme.wsattr(wtypes.text, mandatory=True) + "'suppressed' or 'unsuppressed'" + + links = [link.Link] + "A list containing a self link and associated links" + + def __init__(self, **kwargs): + self.fields = objects.event_suppression.fields.keys() + for k in self.fields: + if not hasattr(self, k): + continue + setattr(self, k, kwargs.get(k, wtypes.Unset)) + + @classmethod + def convert_with_links(cls, rpc_event_suppression, expand=True): + parm = EventSuppression(**rpc_event_suppression.as_dict()) + + if not expand: + parm.unset_fields_except(['uuid', 'alarm_id', 'description', + 'suppression_status']) + + parm.links = [link.Link.make_link('self', pecan.request.host_url, + 'event_suppression', parm.uuid), + link.Link.make_link('bookmark', + pecan.request.host_url, + 'event_suppression', parm.uuid, + bookmark=True) + ] + return parm + + +class EventSuppressionCollection(collection.Collection): + """API representation of a collection of event_suppression.""" + + event_suppression = [EventSuppression] + "A list containing EventSuppression objects" + + def __init__(self, **kwargs): + self._type = 'event_suppression' + + @classmethod + def convert_with_links(cls, rpc_event_suppression, limit, url=None, + expand=False, + **kwargs): + collection = EventSuppressionCollection() + collection.event_suppression = [EventSuppression.convert_with_links(p, expand) + for p in rpc_event_suppression] + collection.next = collection.get_next(limit, url=url, **kwargs) + return collection + + +LOCK_NAME = 'EventSuppressionController' + + +class EventSuppressionController(rest.RestController): + """REST controller for event_suppression.""" + + def __init__(self, parent=None, 
**kwargs): + self._parent = parent + + def _get_event_suppression_collection(self, marker=None, limit=None, + sort_key=None, sort_dir=None, + expand=False, resource_url=None, + q=None): + limit = api_utils.validate_limit(limit) + sort_dir = api_utils.validate_sort_dir(sort_dir) + kwargs = {} + if q is not None: + for i in q: + if i.op == 'eq': + kwargs[i.field] = i.value + marker_obj = None + if marker: + marker_obj = objects.event_suppression.get_by_uuid( + pecan.request.context, marker) + + if q is None: + parms = pecan.request.dbapi.event_suppression_get_list( + limit=limit, marker=marker_obj, + sort_key=sort_key, sort_dir=sort_dir) + else: + kwargs['limit'] = limit + kwargs['sort_key'] = sort_key + kwargs['sort_dir'] = sort_dir + + parms = pecan.request.dbapi.event_suppression_get_all(**kwargs) + + return EventSuppressionCollection.convert_with_links( + parms, limit, url=resource_url, expand=expand, + sort_key=sort_key, sort_dir=sort_dir) + + def _get_updates(self, patch): + """Retrieve the updated attributes from the patch request.""" + updates = {} + for p in patch: + attribute = p['path'] if p['path'][0] != '/' else p['path'][1:] + updates[attribute] = p['value'] + return updates + + @staticmethod + def _check_event_suppression_updates(updates): + """Check attributes to be updated""" + + for parameter in updates: + if parameter == 'suppression_status': + if not((updates.get(parameter) == constants.FM_SUPPRESSED) or + (updates.get(parameter) == constants.FM_UNSUPPRESSED)): + msg = _("Invalid event_suppression parameter " + "suppression_status values. 
Valid values are: " + "suppressed, unsuppressed") + raise wsme.exc.ClientSideError(msg) + elif parameter == 'alarm_id': + msg = _("event_suppression parameter alarm_id is not allowed " + "to be updated.") + raise wsme.exc.ClientSideError(msg) + elif parameter == 'description': + msg = _("event_suppression parameter description is not " + "allowed to be updated.") + raise wsme.exc.ClientSideError(msg) + else: + msg = _("event_suppression invalid parameter.") + raise wsme.exc.ClientSideError(msg) + + @wsme_pecan.wsexpose(EventSuppressionCollection, [Query], + types.uuid, wtypes.text, + wtypes.text, wtypes.text, wtypes.text) + def get_all(self, q=[], marker=None, limit=None, + sort_key='id', sort_dir='asc'): + """Retrieve a list of event_suppression.""" + sort_key = ['alarm_id'] + return self._get_event_suppression_collection(marker, limit, + sort_key, + sort_dir, q=q) + + @wsme_pecan.wsexpose(EventSuppression, types.uuid) + def get_one(self, uuid): + """Retrieve information about the given event_suppression.""" + rpc_event_suppression = objects.event_suppression.get_by_uuid( + pecan.request.context, uuid) + return EventSuppression.convert_with_links(rpc_event_suppression) + + @cutils.synchronized(LOCK_NAME) + @wsme.validate(types.uuid, [EventSuppressionPatchType]) + @wsme_pecan.wsexpose(EventSuppression, types.uuid, + body=[EventSuppressionPatchType]) + def patch(self, uuid, patch): + """Updates attributes of event_suppression.""" + event_suppression = objects.event_suppression.get_by_uuid( + pecan.request.context, uuid) + event_suppression = event_suppression.as_dict() + + updates = self._get_updates(patch) + self._check_event_suppression_updates(updates) + + event_suppression.update(updates) + + updated_event_suppression = \ + pecan.request.dbapi.event_suppression_update(uuid, updates) + + return EventSuppression.convert_with_links(updated_event_suppression) diff --git a/fm-rest-api/fm/fm/api/controllers/v1/link.py b/fm-rest-api/fm/fm/api/controllers/v1/link.py new 
file mode 100644 index 00000000..1987c981 --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/v1/link.py @@ -0,0 +1,58 @@ +# Copyright 2013 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import pecan +from wsme import types as wtypes + +from fm.api.controllers.v1 import base + + +def build_url(resource, resource_args, bookmark=False, base_url=None): + if base_url is None: + base_url = pecan.request.public_url + + template = '%(url)s/%(res)s' if bookmark else '%(url)s/v1/%(res)s' + # FIXME(lucasagomes): I'm getting a 404 when doing a GET on + # a nested resource that the URL ends with a '/'. 
+ # https://groups.google.com/forum/#!topic/pecan-dev/QfSeviLg5qs + template += '%(args)s' if resource_args.startswith('?') else '/%(args)s' + return template % {'url': base_url, 'res': resource, 'args': resource_args} + + +class Link(base.APIBase): + """A link representation.""" + + href = wtypes.text + """The url of a link.""" + + rel = wtypes.text + """The name of a link.""" + + type = wtypes.text + """Indicates the type of document/link.""" + + @staticmethod + def make_link(rel_name, url, resource, resource_args, + bookmark=False, type=wtypes.Unset): + href = build_url(resource, resource_args, + bookmark=bookmark, base_url=url) + return Link(href=href, rel=rel_name, type=type) + + @classmethod + def sample(cls): + sample = cls(href="http://localhost:18002" + "eeaca217-e7d8-47b4-bb41-3f99f20ead81", + rel="bookmark") + return sample diff --git a/fm-rest-api/fm/fm/api/controllers/v1/query.py b/fm-rest-api/fm/fm/api/controllers/v1/query.py new file mode 100644 index 00000000..d428427e --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/v1/query.py @@ -0,0 +1,176 @@ +# coding: utf-8 +# Copyright © 2012 New Dream Network, LLC (DreamHost) +# Copyright 2013 IBM Corp. +# Copyright © 2013 eNovance +# Copyright Ericsson AB 2013. All rights reserved +# +# Authors: Doug Hellmann +# Angus Salkeld +# Eoghan Glynn +# Julien Danjou +# Ildiko Vancsa +# Balazs Gibizer +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + + +import inspect +import functools +import six +import ast + +import wsme +from wsme import types as wtypes +from oslo_utils import strutils +from oslo_utils import timeutils +from oslo_log import log +from fm.common.i18n import _ + +LOG = log.getLogger(__name__) + +operation_kind = wtypes.Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt') + + +class _Base(wtypes.Base): + + @classmethod + def from_db_model(cls, m): + return cls(**(m.as_dict())) + + @classmethod + def from_db_and_links(cls, m, links): + return cls(links=links, **(m.as_dict())) + + def as_dict(self, db_model): + valid_keys = inspect.getargspec(db_model.__init__)[0] + if 'self' in valid_keys: + valid_keys.remove('self') + return self.as_dict_from_keys(valid_keys) + + def as_dict_from_keys(self, keys): + return dict((k, getattr(self, k)) + for k in keys + if hasattr(self, k) and + getattr(self, k) != wsme.Unset) + + +class Query(_Base): + """Query filter. + """ + + # The data types supported by the query. + _supported_types = ['integer', 'float', 'string', 'boolean'] + + # Functions to convert the data field to the correct type. + _type_converters = {'integer': int, + 'float': float, + 'boolean': functools.partial( + strutils.bool_from_string, strict=True), + 'string': six.text_type, + 'datetime': timeutils.parse_isotime} + + _op = None # provide a default + + def get_op(self): + return self._op or 'eq' + + def set_op(self, value): + self._op = value + + field = wtypes.text + "The name of the field to test" + + # op = wsme.wsattr(operation_kind, default='eq') + # this ^ doesn't seem to work. + op = wsme.wsproperty(operation_kind, get_op, set_op) + "The comparison operator. Defaults to 'eq'." 
+ + value = wtypes.text + "The value to compare against the stored data" + + type = wtypes.text + "The data type of value to compare against the stored data" + + def __repr__(self): + # for logging calls + return '<Query %r %s %r %s>' % (self.field, + self.op, + self.value, + self.type) + + @classmethod + def sample(cls): + return cls(field='resource_id', + op='eq', + value='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36', + type='string' + ) + + def as_dict(self): + return self.as_dict_from_keys(['field', 'op', 'type', 'value']) + + def _get_value_as_type(self, forced_type=None): + """Convert metadata value to the specified data type. + + This method is called during metadata query to help convert the + querying metadata to the data type specified by user. If there is no + data type given, the metadata will be parsed by ast.literal_eval to + try to do a smart converting. + + NOTE (flwang) Using "_" as prefix to avoid an InvocationError raised + from wsmeext/sphinxext.py. It's OK to call it outside the Query class. + Because the "public" side of that class is actually the outside of the + API, and the "private" side is the API implementation. The method is + only used in the API implementation, so it's OK. + + :returns: metadata value converted with the specified data type. + """ + type = forced_type or self.type + try: + converted_value = self.value + if not type: + try: + converted_value = ast.literal_eval(self.value) + except (ValueError, SyntaxError): + msg = _('Failed to convert the metadata value %s' + ' automatically') % (self.value) + LOG.debug(msg) + else: + if type not in self._supported_types: + # Types must be explicitly declared so the + # correct type converter may be used. Subclasses + # of Query may define _supported_types and + # _type_converters to define their own types. 
+ raise TypeError() + converted_value = self._type_converters[type](self.value) + except ValueError: + msg = _('Failed to convert the value %(value)s' + ' to the expected data type %(type)s.') % \ + {'value': self.value, 'type': type} + raise wsme.exc.ClientSideError(msg) + except TypeError: + msg = _('The data type %(type)s is not supported. The supported' + ' data type list is: %(supported)s') % \ + {'type': type, 'supported': self._supported_types} + raise wsme.exc.ClientSideError(msg) + except Exception: + msg = _('Unexpected exception converting %(value)s to' + ' the expected data type %(type)s.') % \ + {'value': self.value, 'type': type} + raise wsme.exc.ClientSideError(msg) + return converted_value diff --git a/fm-rest-api/fm/fm/api/controllers/v1/sysinv.py b/fm-rest-api/fm/fm/api/controllers/v1/sysinv.py new file mode 100644 index 00000000..b0972484 --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/v1/sysinv.py @@ -0,0 +1,49 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +from oslo_config import cfg +from oslo_log import log +from keystoneauth1 import loading as ks_loading +from cgtsclient.v1 import client as cgts_client +from fm.api import config + + +CONF = cfg.CONF + +LOG = log.getLogger(__name__) + +_SESSION = None + + +def cgtsclient(context, version=1, endpoint=None): + """Constructs a cgts client object for making API requests. + + :param context: The FM request context for auth. + :param version: API endpoint version. 
+ :param endpoint: Optional If the endpoint is not available, it will be + retrieved from session + """ + global _SESSION + + if not _SESSION: + _SESSION = ks_loading.load_session_from_conf_options( + CONF, config.sysinv_group.name) + + auth_token = context.auth_token + if endpoint is None: + auth = context.get_auth_plugin() + service_type, service_name, interface = \ + CONF.sysinv.catalog_info.split(':') + service_parameters = {'service_type': service_type, + 'service_name': service_name, + 'interface': interface, + 'region_name': CONF.sysinv.os_region_name} + endpoint = _SESSION.get_endpoint(auth, **service_parameters) + + return cgts_client.Client(version=version, + endpoint=endpoint, + token=auth_token) diff --git a/fm-rest-api/fm/fm/api/controllers/v1/types.py b/fm-rest-api/fm/fm/api/controllers/v1/types.py new file mode 100644 index 00000000..a553abb1 --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/v1/types.py @@ -0,0 +1,173 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 +# coding: utf-8 +# +# Copyright 2013 Red Hat, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + +import inspect +import json +import six + +import wsme +from wsme import types as wtypes +from oslo_utils import strutils +from oslo_utils import uuidutils + +from fm.common.i18n import _ +from fm.common import exceptions + + +class UuidType(wtypes.UserType): + """A simple UUID type.""" + + basetype = wtypes.text + name = 'uuid' + + @staticmethod + def validate(value): + if not uuidutils.is_uuid_like(value): + raise exceptions.Invalid(uuid=value) + return value + + @staticmethod + def frombasetype(value): + if value is None: + return None + return UuidType.validate(value) + + +class BooleanType(wtypes.UserType): + """A simple boolean type.""" + + basetype = wtypes.text + name = 'boolean' + + @staticmethod + def validate(value): + try: + return strutils.bool_from_string(value, strict=True) + except ValueError as e: + # raise Invalid to return 400 (BadRequest) in the API + raise exceptions.Invalid(six.text_type(e)) + + @staticmethod + def frombasetype(value): + if value is None: + return None + return BooleanType.validate(value) + + +class JsonType(wtypes.UserType): + """A simple JSON type.""" + + basetype = wtypes.text + name = 'json' + + def __str__(self): + # These are the json serializable native types + return ' | '.join(map(str, (wtypes.text, six.integer_types, float, + BooleanType, list, dict, None))) + + @staticmethod + def validate(value): + try: + json.dumps(value) + except TypeError: + raise exceptions.Invalid(_('%s is not JSON serializable') % value) + else: + return value + + @staticmethod + def frombasetype(value): + return JsonType.validate(value) + + +jsontype = JsonType() +uuid = UuidType() + + +class JsonPatchType(wtypes.Base): + """A complex type that represents a single json-patch operation.""" + + path = wtypes.wsattr(wtypes.StringType(pattern='^(/[\w-]+)+$'), + mandatory=True) + op = wtypes.wsattr(wtypes.Enum(str, 'add', 'replace', 'remove'), + mandatory=True) + value = wsme.wsattr(jsontype, 
default=wtypes.Unset) + + # The class of the objects being patched. Override this in subclasses. + _api_base = None + + # Attributes that are not required for construction, but which may not be + # removed if set. Override in subclasses if needed. + _extra_non_removable_attrs = set() + + # Set of non-removable attributes, calculated lazily. + _non_removable_attrs = None + + @staticmethod + def internal_attrs(): + """Returns a list of internal attributes. + + Internal attributes can't be added, replaced or removed. This + method may be overwritten by derived class. + + """ + return ['/created_at', '/id', '/links', '/updated_at', '/uuid'] + + @classmethod + def non_removable_attrs(cls): + """Returns a set of names of attributes that may not be removed. + + Attributes whose 'mandatory' property is True are automatically added + to this set. To add additional attributes to the set, override the + field _extra_non_removable_attrs in subclasses, with a set of the form + {'/foo', '/bar'}. + """ + if cls._non_removable_attrs is None: + cls._non_removable_attrs = cls._extra_non_removable_attrs.copy() + if cls._api_base: + fields = inspect.getmembers(cls._api_base, + lambda a: not inspect.isroutine(a)) + for name, field in fields: + if getattr(field, 'mandatory', False): + cls._non_removable_attrs.add('/%s' % name) + return cls._non_removable_attrs + + @staticmethod + def validate(patch): + _path = '/' + patch.path.split('/')[1] + if _path in patch.internal_attrs(): + msg = _("'%s' is an internal attribute and can not be updated") + raise wsme.exc.ClientSideError(msg % patch.path) + + if patch.path in patch.non_removable_attrs() and patch.op == 'remove': + msg = _("'%s' is a mandatory attribute and can not be removed") + raise wsme.exc.ClientSideError(msg % patch.path) + + if patch.op != 'remove': + if patch.value is wsme.Unset: + msg = _("'add' and 'replace' operations need a value") + raise wsme.exc.ClientSideError(msg) + + ret = {'path': patch.path, 'op': patch.op} + if 
patch.value is not wsme.Unset: + ret['value'] = patch.value + return ret diff --git a/fm-rest-api/fm/fm/api/controllers/v1/utils.py b/fm-rest-api/fm/fm/api/controllers/v1/utils.py new file mode 100755 index 00000000..e501c0d4 --- /dev/null +++ b/fm-rest-api/fm/fm/api/controllers/v1/utils.py @@ -0,0 +1,152 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +import sys +import contextlib +import traceback +import pecan +import wsme +from oslo_config import cfg +from oslo_log import log +from oslo_utils import uuidutils + +from fm.api.controllers.v1.sysinv import cgtsclient +from fm.common import exceptions +from fm.common.i18n import _ + + +CONF = cfg.CONF + +LOG = log.getLogger(__name__) + +ALARM_ENTITY_TYPES_USE_UUID = ['port'] +ENTITY_SEP = '.' +KEY_VALUE_SEP = '=' + + +@contextlib.contextmanager +def save_and_reraise_exception(): + """Save current exception, run some code and then re-raise. + + In some cases the exception context can be cleared, resulting in None + being attempted to be re-raised after an exception handler is run. This + can happen when eventlet switches greenthreads or when running an + exception handler, code raises and catches an exception. In both + cases the exception context will be cleared. + + To work around this, we save the exception state, run handler code, and + then re-raise the original exception. If another exception occurs, the + saved exception is logged and the new exception is re-raised. 
+ """ + type_, value, tb = sys.exc_info() + try: + yield + except Exception: + LOG.error(_('Original exception being dropped: %s'), + traceback.format_exception(type_, value, tb)) + raise + raise (type_, value, tb) + + +def validate_limit(limit): + if limit and limit < 0: + raise wsme.exc.ClientSideError(_("Limit must be positive")) + + return min(CONF.api.limit_max, limit) or CONF.api.limit_max + + +def validate_sort_dir(sort_dir): + if sort_dir not in ['asc', 'desc']: + raise wsme.exc.ClientSideError(_("Invalid sort direction: %s. " + "Acceptable values are " + "'asc' or 'desc'") % sort_dir) + return sort_dir + + +def _get_port(host_name, port_name): + hosts = cgtsclient(pecan.request.context).ihost.list() + for h in hosts: + if h.hostname == host_name: + ports = cgtsclient(pecan.request.context).port.list(h.uuid) + for p in ports: + if p.name == port_name: + return p + return None + + +def make_display_id(iid, replace=False): + if replace: + instance_id = replace_uuids(iid) + else: + instance_id = replace_name_with_uuid(iid) + + return instance_id + + +def replace_name_with_uuid(instance_id): + hName = None + port = None + for keyvalue in instance_id.split(ENTITY_SEP): + try: + (key, value) = keyvalue.split(KEY_VALUE_SEP, 1) + except ValueError: + return instance_id + + if key == 'host': + hName = value + + elif key == 'port': + if hName and not uuidutils.is_uuid_like(value.strip()): + try: + port = _get_port(hName, value) + except exceptions.NodeNotFound: + LOG.error("Can't find the host by name %s", hName) + pass + except exceptions.ServerNotFound: + LOG.error("Can't find the port for name %s", value) + pass + + if port: + new_id = key + KEY_VALUE_SEP + port.uuid + instance_id = instance_id.replace(keyvalue, new_id, 1) + + return instance_id + + +def replace_uuid_with_name(key, value): + new_id = None + if key == 'port': + port = None + try: + port = cgtsclient(pecan.request.context).port.get(value) + except exceptions.ServerNotFound: + LOG.error("Can't find 
the port for uuid %s", value) + pass + + if port is not None: + new_id = key + KEY_VALUE_SEP + port.name + + return new_id + + +def replace_uuids(instance_id): + for keyvalue in instance_id.split(ENTITY_SEP): + try: + (key, value) = keyvalue.split(KEY_VALUE_SEP, 1) + except ValueError: + return instance_id + + if key in ALARM_ENTITY_TYPES_USE_UUID: + if uuidutils.is_uuid_like(value.strip()): + new_id = replace_uuid_with_name(key, value) + else: + new_id = key + KEY_VALUE_SEP + value + + if new_id is not None: + instance_id = instance_id.replace(keyvalue, new_id, 1) + + return instance_id diff --git a/fm-rest-api/fm/fm/api/hooks.py b/fm-rest-api/fm/fm/api/hooks.py new file mode 100644 index 00000000..aabb2b11 --- /dev/null +++ b/fm-rest-api/fm/fm/api/hooks.py @@ -0,0 +1,88 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + +import webob +from pecan import hooks +from oslo_config import cfg +from oslo_log import log +from oslo_serialization import jsonutils + +from fm.common import context +from fm.db import api as dbapi +from fm.common.i18n import _ + +CONF = cfg.CONF + +LOG = log.getLogger(__name__) + + +class ContextHook(hooks.PecanHook): + """Configures a request context and attaches it to the request. + + The following HTTP request headers are used: + + X-User-Name: + Used for context.user_name. + + X-User-Id: + Used for context.user_id. + + X-Project-Name: + Used for context.project. + + X-Project-Id: + Used for context.project_id. + + X-Auth-Token: + Used for context.auth_token. + + X-Roles: + Used for context.roles. 
+ """ + + def before(self, state): + headers = state.request.headers + environ = state.request.environ + user_name = headers.get('X-User-Name') + user_id = headers.get('X-User-Id') + project = headers.get('X-Project-Name') + project_id = headers.get('X-Project-Id') + domain_id = headers.get('X-User-Domain-Id') + domain_name = headers.get('X-User-Domain-Name') + auth_token = headers.get('X-Auth-Token') + roles = headers.get('X-Roles', '').split(',') + catalog_header = headers.get('X-Service-Catalog') + service_catalog = None + if catalog_header: + try: + service_catalog = jsonutils.loads(catalog_header) + except ValueError: + raise webob.exc.HTTPInternalServerError( + _('Invalid service catalog json.')) + + auth_token_info = environ.get('keystone.token_info') + auth_url = CONF.keystone_authtoken.auth_uri + + state.request.context = context.make_context( + auth_token=auth_token, + auth_url=auth_url, + auth_token_info=auth_token_info, + user_name=user_name, + user_id=user_id, + project_name=project, + project_id=project_id, + domain_id=domain_id, + domain_name=domain_name, + roles=roles, + service_catalog=service_catalog + ) + + +class DBHook(hooks.PecanHook): + """Attach the dbapi object to the request so controllers can get to it.""" + + def before(self, state): + state.request.dbapi = dbapi.get_instance() diff --git a/fm-rest-api/fm/fm/api/middleware/__init__.py b/fm-rest-api/fm/fm/api/middleware/__init__.py new file mode 100644 index 00000000..b98b5055 --- /dev/null +++ b/fm-rest-api/fm/fm/api/middleware/__init__.py @@ -0,0 +1,5 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/fm-rest-api/fm/fm/api/middleware/auth_token.py b/fm-rest-api/fm/fm/api/middleware/auth_token.py new file mode 100644 index 00000000..98c260d6 --- /dev/null +++ b/fm-rest-api/fm/fm/api/middleware/auth_token.py @@ -0,0 +1,75 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +import re + +from keystonemiddleware import auth_token +from oslo_log import log + +from fm.common import exceptions +from fm.common import utils +from fm.common.i18n import _ + +LOG = log.getLogger(__name__) + + +class AuthTokenMiddleware(auth_token.AuthProtocol): + """A wrapper on Keystone auth_token middleware. + + Does not perform verification of authentication tokens + for public routes in the API. 
+ + """ + def __init__(self, app, conf, public_api_routes=None): + if public_api_routes is None: + public_api_routes = [] + route_pattern_tpl = '%s(\.json)?$' + + try: + self.public_api_routes = [re.compile(route_pattern_tpl % route_tpl) + for route_tpl in public_api_routes] + except re.error as e: + msg = _('Cannot compile public API routes: %s') % e + + LOG.error(msg) + raise exceptions.ConfigInvalid(error_msg=msg) + + super(AuthTokenMiddleware, self).__init__(app, conf) + + def __call__(self, env, start_response): + path = utils.safe_rstrip(env.get('PATH_INFO'), '/') + + # The information whether the API call is being performed against the + # public API is required for some other components. Saving it to the + # WSGI environment is reasonable thereby. + env['is_public_api'] = any(map(lambda pattern: re.match(pattern, path), + self.public_api_routes)) + + if env['is_public_api']: + return self._app(env, start_response) + + return super(AuthTokenMiddleware, self).__call__(env, start_response) + + @classmethod + def factory(cls, global_config, **local_conf): + public_routes = local_conf.get('acl_public_routes', '') + public_api_routes = [path.strip() for path in public_routes.split(',')] + + def _factory(app): + return cls(app, global_config, public_api_routes=public_api_routes) + return _factory diff --git a/fm-rest-api/fm/fm/cmd/__init__.py b/fm-rest-api/fm/fm/cmd/__init__.py new file mode 100644 index 00000000..b98b5055 --- /dev/null +++ b/fm-rest-api/fm/fm/cmd/__init__.py @@ -0,0 +1,5 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/fm-rest-api/fm/fm/cmd/api.py b/fm-rest-api/fm/fm/cmd/api.py new file mode 100644 index 00000000..c61992fc --- /dev/null +++ b/fm-rest-api/fm/fm/cmd/api.py @@ -0,0 +1,76 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + + +import sys + +import eventlet +from oslo_config import cfg +from oslo_log import log as logging +from oslo_service import systemd +from oslo_service import wsgi + +import logging as std_logging + +from fm.common.i18n import _ +from fm.api import app +from fm.api import config + +api_opts = [ + cfg.StrOpt('bind_host', + default="0.0.0.0", + help=_('IP address for fm api to listen')), + cfg.IntOpt('bind_port', + default=18002, + help=_('listen port for fm api')), + cfg.IntOpt('api_workers', default=2, + help=_("number of api workers")), + cfg.IntOpt('limit_max', + default=2000, + help='the maximum number of items returned in a single ' + 'response from a collection resource') +] + + +CONF = cfg.CONF + + +LOG = logging.getLogger(__name__) +eventlet.monkey_patch(os=False) + + +def main(): + + config.init(sys.argv[1:]) + config.setup_logging() + + application = app.load_paste_app() + + CONF.register_opts(api_opts, 'api') + + host = CONF.api.bind_host + port = CONF.api.bind_port + workers = CONF.api.api_workers + + if workers < 1: + LOG.warning("Wrong worker number, worker = %(workers)s", workers) + workers = 1 + + LOG.info("Server on http://%(host)s:%(port)s with %(workers)s", + {'host': host, 'port': port, 'workers': workers}) + systemd.notify_once() + service = wsgi.Server(CONF, CONF.prog, application, host, port) + + app.serve(service, CONF, workers) + + LOG.debug("Configuration:") + CONF.log_opt_values(LOG, std_logging.DEBUG) + + app.wait() + + +if __name__ == '__main__': + main() diff --git a/fm-rest-api/fm/fm/cmd/dbsync.py b/fm-rest-api/fm/fm/cmd/dbsync.py new file mode 100644 index 00000000..7a7076bd --- /dev/null +++ b/fm-rest-api/fm/fm/cmd/dbsync.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + + +import sys +from oslo_config import cfg + +from fm.db import migration + +CONF = cfg.CONF + + +def main(): + cfg.CONF(sys.argv[1:], project='fm') + migration.db_sync() diff --git a/fm-rest-api/fm/fm/common/__init__.py b/fm-rest-api/fm/fm/common/__init__.py new file mode 100644 index 00000000..b98b5055 --- /dev/null +++ b/fm-rest-api/fm/fm/common/__init__.py @@ -0,0 +1,5 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/fm-rest-api/fm/fm/common/constants.py b/fm-rest-api/fm/fm/common/constants.py new file mode 100644 index 00000000..b29b9d4e --- /dev/null +++ b/fm-rest-api/fm/fm/common/constants.py @@ -0,0 +1,19 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + +import os +import tsconfig.tsconfig as tsc + + +FM_SUPPRESSED = 'suppressed' +FM_UNSUPPRESSED = 'unsuppressed' + +DB_SUPPRESS_STATUS = 1 +DB_MGMT_AFFECTING = 2 +DB_DEGRADE_AFFECTING = 3 + + +FM_LOCK_PATH = os.path.join(tsc.VOLATILE_PATH, "fm") diff --git a/fm-rest-api/fm/fm/common/context.py b/fm-rest-api/fm/fm/common/context.py new file mode 100644 index 00000000..9dd811ef --- /dev/null +++ b/fm-rest-api/fm/fm/common/context.py @@ -0,0 +1,138 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + +from oslo_context import context +from oslo_config import cfg +from keystoneauth1 import plugin +from keystoneauth1.access import service_catalog as k_service_catalog + +from fm.common import policy + + +CONF = cfg.CONF + + +class _ContextAuthPlugin(plugin.BaseAuthPlugin): + """A keystoneauth auth plugin that uses the values from the Context. + + Ideally we would use the plugin provided by auth_token middleware however + this plugin isn't serialized yet so we construct one from the serialized + auth data. 
+ """ + + def __init__(self, auth_token, sc): + super(_ContextAuthPlugin, self).__init__() + + self.auth_token = auth_token + self.service_catalog = k_service_catalog.ServiceCatalogV2(sc) + + def get_token(self, *args, **kwargs): + return self.auth_token + + def get_endpoint(self, session, service_type=None, interface=None, + region_name=None, service_name=None, **kwargs): + return self.service_catalog.url_for(service_type=service_type, + service_name=service_name, + interface=interface, + region_name=region_name) + + +class RequestContext(context.RequestContext): + """Extends security contexts from the OpenStack common library.""" + + def __init__(self, auth_token=None, auth_url=None, domain_id=None, + domain_name=None, user_name=None, user_id=None, + user_domain_name=None, user_domain_id=None, + project_name=None, project_id=None, roles=None, + is_admin=None, read_only=False, show_deleted=False, + request_id=None, trust_id=None, auth_token_info=None, + all_tenants=False, password=None, service_catalog=None, + user_auth_plugin=None, **kwargs): + """Stores several additional request parameters: + + :param domain_id: The ID of the domain. + :param domain_name: The name of the domain. + :param user_domain_id: The ID of the domain to + authenticate a user against. + :param user_domain_name: The name of the domain to + authenticate a user against. 
+ + """ + super(RequestContext, self).__init__(auth_token=auth_token, + user=user_name, + tenant=project_name, + is_admin=is_admin, + read_only=read_only, + show_deleted=show_deleted, + request_id=request_id, + roles=roles) + + self.user_name = user_name + self.user_id = user_id + self.project_name = project_name + self.project_id = project_id + self.domain_id = domain_id + self.domain_name = domain_name + self.user_domain_id = user_domain_id + self.user_domain_name = user_domain_name + self.auth_url = auth_url + self.auth_token_info = auth_token_info + self.trust_id = trust_id + self.all_tenants = all_tenants + self.password = password + + if service_catalog: + # Only include required parts of service_catalog + self.service_catalog = [s for s in service_catalog + if s.get('type') in + ('platform', )] + else: + # if list is empty or none + self.service_catalog = [] + + self.user_auth_plugin = user_auth_plugin + if is_admin is None: + self.is_admin = policy.check_is_admin(self) + else: + self.is_admin = is_admin + + def to_dict(self): + value = super(RequestContext, self).to_dict() + value.update({'auth_token': self.auth_token, + 'auth_url': self.auth_url, + 'domain_id': self.domain_id, + 'domain_name': self.domain_name, + 'user_domain_id': self.user_domain_id, + 'user_domain_name': self.user_domain_name, + 'user_name': self.user_name, + 'user_id': self.user_id, + 'project_name': self.project_name, + 'project_id': self.project_id, + 'is_admin': self.is_admin, + 'read_only': self.read_only, + 'roles': self.roles, + 'show_deleted': self.show_deleted, + 'request_id': self.request_id, + 'trust_id': self.trust_id, + 'auth_token_info': self.auth_token_info, + 'password': self.password, + 'all_tenants': self.all_tenants, + 'service_catalog': self.service_catalog}) + return value + + @classmethod + def from_dict(cls, values): + return cls(**values) + + def get_auth_plugin(self): + if self.user_auth_plugin: + return self.user_auth_plugin + else: + return 
# (continuation of RequestContext.get_auth_plugin from the previous chunk:
#  the expression returned when no user_auth_plugin was supplied)
_ContextAuthPlugin(self.auth_token, self.service_catalog)


def make_context(*args, **kwargs):
    """Convenience factory for RequestContext."""
    return RequestContext(*args, **kwargs)

# === patch: new file fm-rest-api/fm/fm/common/exceptions.py ===
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

import six
import webob.exc
from oslo_utils._i18n import _
from oslo_log import log as logging

LOG = logging.getLogger(__name__)


class ApiError(Exception):
    """Base FM API exception.

    Subclasses override `message` (a %-format template expanded with the
    constructor's kwargs) and `code` (the webob HTTP error class used when
    the exception is mapped to an HTTP response).
    """

    message = _("An unknown exception occurred.")

    code = webob.exc.HTTPInternalServerError

    def __init__(self, message=None, **kwargs):

        self.kwargs = kwargs

        if 'code' not in self.kwargs and hasattr(self, 'code'):
            self.kwargs['code'] = self.code

        if message:
            self.message = message

        try:
            super(ApiError, self).__init__(self.message % kwargs)
            self.message = self.message % kwargs
        except Exception:
            # Bad/missing substitution keys: log and re-raise so the
            # programming error is not silently swallowed.
            LOG.exception('Exception in string format operation, '
                          'kwargs: %s', kwargs)
            raise

    def __str__(self):
        # Bug fix: the original returned repr(self.value), but no `value`
        # attribute is ever set, so str(exc) raised AttributeError.
        return self.message

    def __unicode__(self):
        return self.message

    def format_message(self):
        if self.__class__.__name__.endswith('_Remote'):
            return self.args[0]
        else:
            return six.text_type(self)


class NotFound(ApiError):
    message = _("Resource could not be found.")
    code = webob.exc.HTTPNotFound


class HTTPNotFound(NotFound):
    pass


class AlarmNotFound(NotFound):
    message = _("Alarm %(alarm)s could not be found.")


class EventLogNotFound(NotFound):
    message = _("Event Log %(eventLog)s could not be found.")


class NodeNotFound(NotFound):
    message = _("Node %(node)s could not be found.")


class ServerNotFound(NotFound):
    message = _("Server %(server)s could not be found.")


class Invalid(ApiError):
    message = _("Unacceptable parameters.")
    code = webob.exc.HTTPBadRequest


class PatchError(Invalid):
    message = _("Couldn't apply patch '%(patch)s'. Reason: %(reason)s")


class ConfigInvalid(Invalid):
    message = _("Invalid configuration file. %(error_msg)s")


class InvalidParameterValue(Invalid):
    message = _("%(err)s")


class InvalidIdentity(Invalid):
    message = _("Expected an uuid or int but received %(identity)s.")


class PolicyNotAuthorized(ApiError):
    message = _("Policy doesn't allow %(action)s to be performed.")
    code = webob.exc.HTTPUnauthorized


class Conflict(ApiError):
    message = _('HTTP Conflict.')
    code = webob.exc.HTTPConflict


class AlarmAlreadyExists(Conflict):
    message = _("An Alarm with UUID %(uuid)s already exists.")

# === patch: new file fm-rest-api/fm/fm/common/i18n.py ===
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

import oslo_i18n

_translators = oslo_i18n.TranslatorFactory(domain='fm')

# The primary translation function using the well-known name "_"
_ = _translators.primary

# === patch: new file fm-rest-api/fm/fm/common/policy.py (license header;
#     body in next chunk) ===
# Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
+# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# +"""Policy Engine For FM.""" + +from oslo_config import cfg + +from oslo_policy import policy +from oslo_log import log + + +base_rules = [ + policy.RuleDefault('admin_required', 'role:admin or is_admin:1', + description='Who is considered an admin'), + policy.RuleDefault('admin_api', 'is_admin_required:True', + description='admin API requirement'), + policy.RuleDefault('default', 'rule:admin_api', + description='default rule'), +] + +CONF = cfg.CONF + + +LOG = log.getLogger(__name__) + +_ENFORCER = None + + +# we can get a policy enforcer by this init. +# oslo policy support change policy rule dynamically. +# at present, policy.enforce will reload the policy rules when it checks +# the policy files have been touched. +def init(policy_file=None, rules=None, + default_rule=None, use_conf=True, overwrite=True): + """Init an Enforcer class. + + :param policy_file: Custom policy file to use, if none is + specified, ``conf.policy_file`` will be + used. + :param rules: Default dictionary / Rules to use. It will be + considered just in the first instantiation. If + :meth:`load_rules` with ``force_reload=True``, + :meth:`clear` or :meth:`set_rules` with + ``overwrite=True`` is called this will be overwritten. + :param default_rule: Default rule to use, conf.default_rule will + be used if none is specified. + :param use_conf: Whether to load rules from cache or config file. + :param overwrite: Whether to overwrite existing rules when reload rules + from config file. 
+ """ + global _ENFORCER + if not _ENFORCER: + # http://docs.openstack.org/developer/oslo.policy/usage.html + _ENFORCER = policy.Enforcer(CONF, + policy_file=policy_file, + rules=rules, + default_rule=default_rule, + use_conf=use_conf, + overwrite=overwrite) + _ENFORCER.register_defaults(base_rules) + return _ENFORCER + + +def check_is_admin(context): + """Whether or not role contains 'admin' role according to policy setting. + + """ + init() + + target = {} + credentials = context.to_dict() + + return _ENFORCER.enforce('context_is_admin', target, credentials) diff --git a/fm-rest-api/fm/fm/common/timeutils.py b/fm-rest-api/fm/fm/common/timeutils.py new file mode 100644 index 00000000..77d76e47 --- /dev/null +++ b/fm-rest-api/fm/fm/common/timeutils.py @@ -0,0 +1,184 @@ +# Copyright 2011 OpenStack Foundation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Time related utilities and helper functions. 
+""" + +import calendar +import datetime + +import iso8601 + + +# ISO 8601 extended time format with microseconds +_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' +_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' +PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND + + +def isotime(at=None, subsecond=False): + """Stringify time in ISO 8601 format""" + if not at: + at = utcnow() + st = at.strftime(_ISO8601_TIME_FORMAT + if not subsecond + else _ISO8601_TIME_FORMAT_SUBSECOND) + tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' + st += ('Z' if tz == 'UTC' else tz) + return st + + +def parse_isotime(timestr): + """Parse time from ISO 8601 format""" + try: + return iso8601.parse_date(timestr) + except iso8601.ParseError as e: + raise ValueError(e.message) + except TypeError as e: + raise ValueError(e.message) + + +def strtime(at=None, fmt=PERFECT_TIME_FORMAT): + """Returns formatted utcnow.""" + if not at: + at = utcnow() + return at.strftime(fmt) + + +def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): + """Turn a formatted time back into a datetime.""" + return datetime.datetime.strptime(timestr, fmt) + + +def normalize_time(timestamp): + """Normalize time in arbitrary timezone to UTC naive object""" + offset = timestamp.utcoffset() + if offset is None: + return timestamp + return timestamp.replace(tzinfo=None) - offset + + +def is_older_than(before, seconds): + """Return True if before is older than seconds.""" + if isinstance(before, str): + before = parse_strtime(before).replace(tzinfo=None) + return utcnow() - before > datetime.timedelta(seconds=seconds) + + +def is_newer_than(after, seconds): + """Return True if after is newer than seconds.""" + if isinstance(after, str): + after = parse_strtime(after).replace(tzinfo=None) + return after - utcnow() > datetime.timedelta(seconds=seconds) + + +def utcnow_ts(): + """Timestamp version of our utcnow function.""" + return calendar.timegm(utcnow().timetuple()) + + +def utcnow(): + """Overridable version of 
utils.utcnow.""" + if utcnow.override_time: + try: + return utcnow.override_time.pop(0) + except AttributeError: + return utcnow.override_time + return datetime.datetime.utcnow() + + +def iso8601_from_timestamp(timestamp): + """Returns a iso8601 formated date from timestamp""" + return isotime(datetime.datetime.utcfromtimestamp(timestamp)) + + +utcnow.override_time = None + + +def set_time_override(override_time=datetime.datetime.utcnow()): + """ + Override utils.utcnow to return a constant time or a list thereof, + one at a time. + """ + utcnow.override_time = override_time + + +def advance_time_delta(timedelta): + """Advance overridden time using a datetime.timedelta.""" + assert(utcnow.override_time is not None) + try: + for dt in utcnow.override_time: + dt += timedelta + except TypeError: + utcnow.override_time += timedelta + + +def advance_time_seconds(seconds): + """Advance overridden time by seconds.""" + advance_time_delta(datetime.timedelta(0, seconds)) + + +def clear_time_override(): + """Remove the overridden time.""" + utcnow.override_time = None + + +def marshall_now(now=None): + """Make an rpc-safe datetime with microseconds. + + Note: tzinfo is stripped, but not required for relative times.""" + if not now: + now = utcnow() + return dict(day=now.day, month=now.month, year=now.year, hour=now.hour, + minute=now.minute, second=now.second, + microsecond=now.microsecond) + + +def unmarshall_time(tyme): + """Unmarshall a datetime dict.""" + return datetime.datetime(day=tyme['day'], + month=tyme['month'], + year=tyme['year'], + hour=tyme['hour'], + minute=tyme['minute'], + second=tyme['second'], + microsecond=tyme['microsecond']) + + +def delta_seconds(before, after): + """ + Compute the difference in seconds between two date, time, or + datetime objects (as a float, to microsecond resolution). 
+ """ + delta = after - before + try: + return delta.total_seconds() + except AttributeError: + return ((delta.days * 24 * 3600) + delta.seconds + + float(delta.microseconds) / (10 ** 6)) + + +def is_soon(dt, window): + """ + Determines if time is going to happen in the next window seconds. + + :params dt: the time + :params window: minimum seconds to remain to consider the time not soon + + :return: True if expiration is within the given duration + """ + soon = (utcnow() + datetime.timedelta(seconds=window)) + return normalize_time(dt) <= soon diff --git a/fm-rest-api/fm/fm/common/utils.py b/fm-rest-api/fm/fm/common/utils.py new file mode 100644 index 00000000..3149837c --- /dev/null +++ b/fm-rest-api/fm/fm/common/utils.py @@ -0,0 +1,62 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (c) 2012 Intel Corporation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2013-2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + +import six +import uuid +from oslo_log import log +from oslo_concurrency import lockutils + +from fm.common import constants + +LOG = log.getLogger(__name__) + + +def generate_uuid(): + return str(uuid.uuid4()) + + +def synchronized(name, external=True): + if external: + lock_path = constants.FM_LOCK_PATH + else: + lock_path = None + return lockutils.synchronized(name, + lock_file_prefix='fm-', + external=external, + lock_path=lock_path) + + +def safe_rstrip(value, chars=None): + """Removes trailing characters from a string if that does not make it empty + + :param value: A string value that will be stripped. + :param chars: Characters to remove. + :return: Stripped value. + + """ + if not isinstance(value, six.string_types): + LOG.warning("Failed to remove trailing character. " + "Returning original object. " + "Supplied object is not a string: %s,", value) + return value + + return value.rstrip(chars) or value diff --git a/fm-rest-api/fm/fm/config-generator.conf b/fm-rest-api/fm/fm/config-generator.conf new file mode 100644 index 00000000..748bfa44 --- /dev/null +++ b/fm-rest-api/fm/fm/config-generator.conf @@ -0,0 +1,11 @@ +[DEFAULT] +output_file = fm.conf.sample +wrap_width = 79 +namespace = fm.api.conf +namespace = keystonemiddleware.auth_token +namespace = oslo.middleware +namespace = oslo.log +namespace = oslo.policy +namespace = oslo.db + + diff --git a/fm-rest-api/fm/fm/db/__init__.py b/fm-rest-api/fm/fm/db/__init__.py new file mode 100644 index 00000000..b98b5055 --- /dev/null +++ b/fm-rest-api/fm/fm/db/__init__.py @@ -0,0 +1,5 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/fm-rest-api/fm/fm/db/api.py b/fm-rest-api/fm/fm/db/api.py new file mode 100644 index 00000000..03d89b66 --- /dev/null +++ b/fm-rest-api/fm/fm/db/api.py @@ -0,0 +1,152 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + +""" +Base classes for storage engines +""" + +import abc + +from oslo_config import cfg +from oslo_db import api as db_api + + +_BACKEND_MAPPING = {'sqlalchemy': 'fm.db.sqlalchemy.api'} +IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=_BACKEND_MAPPING, + lazy=True) + + +def get_instance(): + """Return a DB API instance.""" + return IMPL + + +class Connection(object): + """Base class for storage system connections.""" + + __metaclass__ = abc.ABCMeta + + @abc.abstractmethod + def __init__(self): + """Constructor.""" + + @abc.abstractmethod + def alarm_create(self, values): + """Create a new alarm. + + :param values: A dict containing several items used to identify + and track the alarm. + :returns: An alarm. + """ + + @abc.abstractmethod + def alarm_get(self, uuid): + """Return an alarm. + + :param uuid: The uuid of an alarm. + :returns: An alarm. + """ + + @abc.abstractmethod + def alarm_get_by_ids(self, alarm_id, entity_instance_id): + """Return an alarm. + + :param alarm_id: The alarm_id of an alarm. + :param entity_instance_id: The entity_instance_id of an alarm. + :returns: An alarm. + """ + + @abc.abstractmethod + def alarm_get_all(self, uuid=None, alarm_id=None, entity_type_id=None, + entity_instance_id=None, severity=None, alarm_type=None): + """Return a list of alarms for the given filters. + + :param uuid: The uuid of an alarm. + :param alarm_id: The alarm_id of an alarm. + :param entity_type_id: The entity_type_id of an alarm. + :param entity_instance_id: The entity_instance_id of an alarm. + :param severity: The severity of an alarm. + :param alarm_type: The alarm_type of an alarm. + :returns: alarms. + """ + + @abc.abstractmethod + def alarm_get_list(self, limit=None, marker=None, + sort_key=None, sort_dir=None): + """Return a list of alarms. + + :param limit: Maximum number of alarm to return. + :param marker: the last item of the previous page; we return the next + result set. 
+ :param sort_key: Attribute by which results should be sorted. + :param sort_dir: direction in which results should be sorted. + (asc, desc) + """ + + @abc.abstractmethod + def alarm_update(self, id, values): + """Update properties of an alarm. + + :param id: The id or uuid of an alarm. + :param values: Dict of values to update. + + :returns: An alarm. + """ + + @abc.abstractmethod + def alarm_destroy(self, id): + """Destroy an alarm. + + :param id: The id or uuid of an alarm. + """ + + @abc.abstractmethod + def alarm_destroy_by_ids(self, alarm_id, entity_instance_id): + """Destroy an alarm. + + :param alarm_id: The alarm_id of an alarm. + :param entity_instance_id: The entity_instance_id of an alarm. + + """ + + @abc.abstractmethod + def event_log_get(self, uuid): + """Return an event_log. + + :param uuid: The uuid of an event_log. + :returns: An event_log. + """ + + @abc.abstractmethod + def event_log_get_all(self, uuid=None, event_log_id=None, + entity_type_id=None, entity_instance_id=None, + severity=None, event_log_type=None, start=None, + end=None, limit=None): + """Return a list of event_log for the given filters. + + :param uuid: The uuid of an event_log. + :param event_log_id: The id of an event_log. + :param entity_type_id: The entity_type_id of an event_log. + :param entity_instance_id: The entity_instance_id of an event_log. + :param severity: The severity of an event_log. + :param alarm_type: The alarm_type of an event_log. + :param start: The event_logs that occurred after start + :param end: The event_logs that occurred before end + :returns: event_log. + """ + + @abc.abstractmethod + def event_log_get_list(self, limit=None, marker=None, + sort_key=None, sort_dir=None, evtType="ALL"): + """Return a list of event_log. + + :param limit: Maximum number of event_log to return. + :param marker: the last item of the previous page; we return the next + result set. + :param sort_key: Attribute by which results should be sorted. 
+ :param sort_dir: direction in which results should be sorted. + (asc, desc) + """ diff --git a/fm-rest-api/fm/fm/db/migration.py b/fm-rest-api/fm/fm/db/migration.py new file mode 100644 index 00000000..32cfa0e5 --- /dev/null +++ b/fm-rest-api/fm/fm/db/migration.py @@ -0,0 +1,57 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# under the License. + +"""Database setup and migration commands.""" + +import os +from oslo_config import cfg +from oslo_db import options + +from stevedore import driver +from fm.db.sqlalchemy import api as db_api + +options.set_defaults(cfg.CONF) + + +_IMPL = None + +MIGRATE_REPO_PATH = os.path.join( + os.path.abspath(os.path.dirname(__file__)), + 'sqlalchemy', + 'migrate_repo', +) + + +def get_backend(): + global _IMPL + if not _IMPL: + _IMPL = driver.DriverManager("fm.database.migration_backend", + cfg.CONF.database.backend).driver + return _IMPL + + +def db_sync(version=None, engine=None): + """Migrate the database to `version` or the most recent version.""" + + if engine is None: + engine = db_api.get_engine() + return get_backend().db_sync(engine=engine, + abs_path=MIGRATE_REPO_PATH, + version=version + ) + + +def upgrade(version=None): + """Migrate the database to `version` or the most recent version.""" + return get_backend().upgrade(version) + + +def version(): + return get_backend().version() + + +def create_schema(): + return get_backend().create_schema() diff --git a/fm-rest-api/fm/fm/db/sqlalchemy/__init__.py b/fm-rest-api/fm/fm/db/sqlalchemy/__init__.py new file mode 100644 index 00000000..b98b5055 --- /dev/null +++ b/fm-rest-api/fm/fm/db/sqlalchemy/__init__.py @@ -0,0 +1,5 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/fm-rest-api/fm/fm/db/sqlalchemy/api.py b/fm-rest-api/fm/fm/db/sqlalchemy/api.py new file mode 100755 index 00000000..d61c5df4 --- /dev/null +++ b/fm-rest-api/fm/fm/db/sqlalchemy/api.py @@ -0,0 +1,445 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + +"""SQLAlchemy storage backend.""" + +import threading + +from oslo_log import log +from oslo_config import cfg +from oslo_utils import uuidutils + +from oslo_db import exception as db_exc +from oslo_db.sqlalchemy import enginefacade +from oslo_db.sqlalchemy import utils as db_utils +from oslo_db.sqlalchemy import session as db_session + +from sqlalchemy import asc, desc, or_ +from sqlalchemy.orm.exc import NoResultFound + +from fm.common import constants +from fm.common import exceptions +from fm.common import utils +from fm.db import api +from fm.db.sqlalchemy import models +from fm import objects + + +CONF = cfg.CONF + +LOG = log.getLogger(__name__) + +_LOCK = threading.Lock() +_FACADE = None + +context_manager = enginefacade.transaction_context() +context_manager.configure() + + +def _create_facade_lazily(): + global _LOCK + with _LOCK: + global _FACADE + if _FACADE is None: + _FACADE = db_session.EngineFacade( + CONF.database.connection, + **dict(CONF.database) + ) + return _FACADE + + +def get_engine(): + facade = _create_facade_lazily() + return facade.get_engine() + + +def get_session(**kwargs): + facade = _create_facade_lazily() + return facade.get_session(**kwargs) + + +def get_backend(): + """The backend is this module itself.""" + return Connection() + + +def _session_for_read(): + _context = threading.local() + return enginefacade.reader.using(_context) + + +def _session_for_write(): + _context = threading.local() + LOG.debug("_session_for_write CONTEXT=%s" % _context) + return enginefacade.writer.using(_context) + + +def _paginate_query(model, limit=None, marker=None, sort_key=None, + sort_dir=None, 
# (continuation of _paginate_query — reconstructed in full; the signature
#  line also appears as a fragment at the end of the previous chunk)
def _paginate_query(model, limit=None, marker=None, sort_key=None,
                    sort_dir=None, query=None):
    """Apply sorting/marker pagination to `query` and return the rows."""
    if not query:
        query = model_query(model)

    if not sort_key:
        sort_keys = []
    elif not isinstance(sort_key, list):
        sort_keys = [sort_key]
    else:
        sort_keys = sort_key

    # 'id' is appended as a tie-breaker so pagination order is stable.
    if 'id' not in sort_keys:
        sort_keys.append('id')
    query = db_utils.paginate_query(query, model, limit, sort_keys,
                                    marker=marker, sort_dir=sort_dir)
    return query.all()


def model_query(model, *args, **kwargs):
    """Query helper for simpler session usage.

    NOTE(review): extra kwargs (e.g. session=, read_deleted=) passed by
    some callers are currently ignored — confirm whether they should be
    honoured here.
    """
    with _session_for_read() as session:
        query = session.query(model, *args)
        return query


def add_event_log_filter_by_event_suppression(query, include_suppress):
    """Adds an event_suppression filter to a query.

    Filters results by suppression status

    :param query: Initial query to add filter to.
    :param include_suppress: Value for filtering results by.
    :return: Modified query.
    """
    query = query.outerjoin(
        models.EventSuppression,
        models.EventLog.event_log_id == models.EventSuppression.alarm_id)

    query = query.add_columns(models.EventSuppression.suppression_status)

    if include_suppress:
        return query

    return query.filter(or_(models.EventLog.state == 'log',
                            models.EventSuppression.suppression_status ==
                            constants.FM_UNSUPPRESSED))


def add_alarm_filter_by_event_suppression(query, include_suppress):
    """Adds an event_suppression filter to a query.

    Filters results by suppression status

    :param query: Initial query to add filter to.
    :param include_suppress: Value for filtering results by.
    :return: Modified query.
    """
    query = query.join(
        models.EventSuppression,
        models.Alarm.alarm_id == models.EventSuppression.alarm_id)

    query = query.add_columns(models.EventSuppression.suppression_status)

    if include_suppress:
        return query

    return query.filter(models.EventSuppression.suppression_status ==
                        constants.FM_UNSUPPRESSED)


def add_alarm_mgmt_affecting_by_event_suppression(query):
    """Adds a mgmt_affecting attribute from event_suppression to query.

    :param query: Initial query.
    :return: Modified query.
    """
    query = query.add_columns(models.EventSuppression.mgmt_affecting)
    return query


def add_alarm_degrade_affecting_by_event_suppression(query):
    """Adds a degrade_affecting attribute from event_suppression to query.

    :param query: Initial query.
    :return: Modified query.
    """
    query = query.add_columns(models.EventSuppression.degrade_affecting)
    return query


class Connection(api.Connection):
    """SqlAlchemy connection."""

    def __init__(self):
        pass

    def get_session(self, autocommit=True):
        # Bug fix: the module-level get_session() is declared with **kwargs
        # only, so the original positional call raised
        # "get_session() takes 0 positional arguments".
        return get_session(autocommit=autocommit)

    def alarm_create(self, values):
        """Insert a new alarm row; generates a uuid when none is supplied.

        :raises: AlarmAlreadyExists on a duplicate uuid.
        """
        if not values.get('uuid'):
            values['uuid'] = utils.generate_uuid()
        alarm = models.Alarm()
        alarm.update(values)
        with _session_for_write() as session:
            try:
                session.add(alarm)
                session.flush()
            except db_exc.DBDuplicateEntry:
                raise exceptions.AlarmAlreadyExists(uuid=values['uuid'])
        return alarm

    @objects.objectify(objects.alarm)
    def alarm_get(self, uuid):
        """Return the alarm with the given uuid (suppressed ones included).

        :raises: AlarmNotFound when no row matches.
        """
        query = model_query(models.Alarm)

        if uuid:
            query = query.filter_by(uuid=uuid)

        query = add_alarm_filter_by_event_suppression(query,
                                                      include_suppress=True)
        query = add_alarm_mgmt_affecting_by_event_suppression(query)
        query = add_alarm_degrade_affecting_by_event_suppression(query)

        try:
            result = query.one()
        except NoResultFound:
            raise exceptions.AlarmNotFound(alarm=uuid)

        return result

    # def alarm_get_by_ids(self, alarm_id, entity_instance_id): — the
    # remainder of this class continues in the next chunk.
model_query(models.Alarm) + if alarm_id and entity_instance_id: + query = query.filter_by(alarm_id=alarm_id) + query = query.filter_by(entity_instance_id=entity_instance_id) + + query = query.join(models.EventSuppression, + models.Alarm.alarm_id == + models.EventSuppression.alarm_id) + query = add_alarm_mgmt_affecting_by_event_suppression(query) + query = add_alarm_degrade_affecting_by_event_suppression(query) + + try: + result = query.one() + except NoResultFound: + return None + + return result + + def alarm_get_all(self, uuid=None, alarm_id=None, entity_type_id=None, + entity_instance_id=None, severity=None, alarm_type=None, + limit=None, include_suppress=False): + query = model_query(models.Alarm, read_deleted="no") + query = query.order_by(asc(models.Alarm.severity), + asc(models.Alarm.entity_instance_id), + asc(models.Alarm.id)) + if uuid is not None: + query = query.filter(models.Alarm.uuid.contains(uuid)) + if alarm_id is not None: + query = query.filter(models.Alarm.alarm_id.contains(alarm_id)) + if entity_type_id is not None: + query = query.filter(models.Alarm.entity_type_id.contains( + entity_type_id)) + if entity_instance_id is not None: + query = query.filter(models.Alarm.entity_instance_id.contains( + entity_instance_id)) + if severity is not None: + query = query.filter(models.Alarm.severity.contains(severity)) + if alarm_type is not None: + query = query.filter(models.Alarm.alarm_type.contains(alarm_type)) + query = add_alarm_filter_by_event_suppression(query, include_suppress) + query = add_alarm_mgmt_affecting_by_event_suppression(query) + query = add_alarm_degrade_affecting_by_event_suppression(query) + if limit is not None: + query = query.limit(limit) + alarm_list = [] + try: + alarm_list = query.all() + except UnicodeDecodeError: + LOG.error("UnicodeDecodeError occurred, " + "return an empty alarm list.") + return alarm_list + + @objects.objectify(objects.alarm) + def alarm_get_list(self, limit=None, marker=None, + sort_key=None, 
sort_dir=None, + include_suppress=False): + + query = model_query(models.Alarm) + query = add_alarm_filter_by_event_suppression(query, include_suppress) + query = add_alarm_mgmt_affecting_by_event_suppression(query) + query = add_alarm_degrade_affecting_by_event_suppression(query) + + return _paginate_query(models.Alarm, limit, marker, + sort_key, sort_dir, query) + + def alarm_update(self, id, values): + with _session_for_write() as session: + query = model_query(models.Alarm, session=session) + query = query.filter_by(id=id) + + count = query.update(values, synchronize_session='fetch') + if count != 1: + raise exceptions.AlarmNotFound(alarm=id) + return query.one() + + def alarm_destroy(self, id): + with _session_for_write() as session: + query = model_query(models.Alarm, session=session) + query = query.filter_by(uuid=id) + + try: + query.one() + except NoResultFound: + raise exceptions.AlarmNotFound(alarm=id) + + query.delete() + + def alarm_destroy_by_ids(self, alarm_id, entity_instance_id): + with _session_for_write() as session: + query = model_query(models.Alarm, session=session) + if alarm_id and entity_instance_id: + query = query.filter_by(alarm_id=alarm_id) + query = query.filter_by(entity_instance_id=entity_instance_id) + + try: + query.one() + except NoResultFound: + raise exceptions.AlarmNotFound(alarm=alarm_id) + + query.delete() + + @objects.objectify(objects.event_log) + def event_log_get(self, uuid): + query = model_query(models.EventLog) + + if uuid: + query = query.filter_by(uuid=uuid) + + query = add_event_log_filter_by_event_suppression(query, + include_suppress=True) + + try: + result = query.one() + except NoResultFound: + raise exceptions.EventLogNotFound(eventLog=uuid) + + return result + + def _addEventTypeToQuery(self, query, evtType="ALL"): + if evtType is None or not (evtType in ["ALL", "ALARM", "LOG"]): + evtType = "ALL" + + if evtType == "ALARM": + query = query.filter(or_(models.EventLog.state == "set", + models.EventLog.state == 
"clear")) + if evtType == "LOG": + query = query.filter(models.EventLog.state == "log") + + return query + + @objects.objectify(objects.event_log) + def event_log_get_all(self, uuid=None, event_log_id=None, + entity_type_id=None, entity_instance_id=None, + severity=None, event_log_type=None, start=None, + end=None, limit=None, evtType="ALL", include_suppress=False): + query = model_query(models.EventLog, read_deleted="no") + query = query.order_by(desc(models.EventLog.timestamp)) + if uuid is not None: + query = query.filter_by(uuid=uuid) + + query = self._addEventTypeToQuery(query, evtType) + + if event_log_id is not None: + query = query.filter(models.EventLog.event_log_id.contains( + event_log_id)) + if entity_type_id is not None: + query = query.filter(models.EventLog.entity_type_id.contains( + entity_type_id)) + if entity_instance_id is not None: + query = query.filter(models.EventLog.entity_instance_id.contains( + entity_instance_id)) + if severity is not None: + query = query.filter(models.EventLog.severity.contains(severity)) + + if event_log_type is not None: + query = query.filter_by(event_log_type=event_log_type) + if start is not None: + query = query.filter(models.EventLog.timestamp >= start) + if end is not None: + query = query.filter(models.EventLog.timestamp <= end) + if include_suppress is not None: + query = add_event_log_filter_by_event_suppression(query, + include_suppress) + if limit is not None: + query = query.limit(limit) + + hist_list = [] + try: + hist_list = query.all() + except UnicodeDecodeError: + LOG.error("UnicodeDecodeError occurred, " + "return an empty event log list.") + return hist_list + + @objects.objectify(objects.event_log) + def event_log_get_list(self, limit=None, marker=None, + sort_key=None, sort_dir=None, evtType="ALL", + include_suppress=False): + + query = model_query(models.EventLog) + query = self._addEventTypeToQuery(query, evtType) + query = add_event_log_filter_by_event_suppression(query, + include_suppress) + + 
return _paginate_query(models.EventLog, limit, marker,
+                               sort_key, sort_dir, query)
+
+    @objects.objectify(objects.event_suppression)
+    def event_suppression_get(self, id):
+        query = model_query(models.EventSuppression)
+        if uuidutils.is_uuid_like(id):
+            query = query.filter_by(uuid=id)
+        else:
+            query = query.filter_by(id=id)
+
+        try:
+            result = query.one()
+        except NoResultFound:
+            raise exceptions.InvalidParameterValue(
+                err="No event suppression entry found for %s" % id)
+
+        return result
+
+    @objects.objectify(objects.event_suppression)
+    def event_suppression_get_all(self, uuid=None, alarm_id=None,
+                                  description=None, suppression_status=None,
+                                  limit=None, sort_key=None, sort_dir=None):
+        query = model_query(models.EventSuppression, read_deleted="no")
+        if uuid is not None:
+            query = query.filter_by(uuid=uuid)
+        if alarm_id is not None:
+            query = query.filter_by(alarm_id=alarm_id)
+        if description is not None:
+            query = query.filter_by(description=description)
+        if suppression_status is not None:
+            query = query.filter_by(suppression_status=suppression_status)
+
+        query = query.filter_by(set_for_deletion=False)
+
+        return _paginate_query(models.EventSuppression, limit, None,
+                               sort_key, sort_dir, query)
+
+    @objects.objectify(objects.event_suppression)
+    def event_suppression_update(self, uuid, values):
+        with _session_for_write() as session:
+            query = model_query(models.EventSuppression, session=session)
+            query = query.filter_by(uuid=uuid)
+
+            count = query.update(values, synchronize_session='fetch')
+            if count != 1:
+                raise exceptions.NotFound(uuid)
+            return query.one()
diff --git a/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/__init__.py b/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/__init__.py
new file mode 100644
index 00000000..b98b5055
--- /dev/null
+++ b/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/__init__.py
@@ -0,0 +1,5 @@
+#
+# Copyright (c) 2018 Wind River Systems, Inc.
+# +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/manage.py b/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/manage.py new file mode 100644 index 00000000..1d50f84d --- /dev/null +++ b/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/manage.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + +from migrate.versioning.shell import main + + +if __name__ == '__main__': + main(debug='False', repository='.') diff --git a/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/migrate.cfg b/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/migrate.cfg new file mode 100644 index 00000000..14bf9534 --- /dev/null +++ b/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/migrate.cfg @@ -0,0 +1,20 @@ +[db_settings] +# Used to identify which repository this database is versioned under. +# You can use the name of your project. +repository_id=fm + +# The name of the database table used to track the schema version. +# This name shouldn't already be used by your project. +# If this is changed once a database is under version control, you'll need to +# change the table name in each database too. +version_table=migrate_version + +# When committing a change script, Migrate will attempt to generate the +# sql for all supported databases; normally, if one of them fails - probably +# because you don't have that database installed - it is ignored and the +# commit continues, perhaps ending successfully. +# Databases in this list MUST compile successfully during a commit, or the +# entire commit will fail. List the databases your application will actually +# be using to ensure your updates to that database work properly. 
+# This must be a list; example: ['postgres','sqlite'] +required_dbs=[] diff --git a/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/versions/001_init.py b/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/versions/001_init.py new file mode 100644 index 00000000..63a36e91 --- /dev/null +++ b/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/versions/001_init.py @@ -0,0 +1,112 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +from sqlalchemy import Column, MetaData, String, Table +from sqlalchemy import Boolean, Integer, DateTime +from sqlalchemy.schema import ForeignKeyConstraint + +ENGINE = 'InnoDB' +CHARSET = 'utf8' + + +def upgrade(migrate_engine): + meta = MetaData() + meta.bind = migrate_engine + + event_suppression = Table( + 'event_suppression', + meta, + Column('created_at', DateTime), + Column('updated_at', DateTime), + Column('deleted_at', DateTime), + + Column('id', Integer, primary_key=True, nullable=False), + Column('uuid', String(36), unique=True, index=True), + Column('alarm_id', String(15), unique=True, index=True), + Column('description', String(255)), + Column('suppression_status', String(15)), + Column('set_for_deletion', Boolean), + Column('mgmt_affecting', String(255)), + Column('degrade_affecting', String(255)), + + mysql_engine=ENGINE, + mysql_charset=CHARSET, + ) + event_suppression.create() + + alarm = Table( + 'alarm', + meta, + Column('created_at', DateTime), + Column('updated_at', DateTime), + Column('deleted_at', DateTime), + + Column('id', Integer, primary_key=True, nullable=False), + Column('uuid', String(255), unique=True, index=True), + Column('alarm_id', String(255), index=True), + Column('alarm_state', String(255)), + Column('entity_type_id', String(255), index=True), + Column('entity_instance_id', String(255), index=True), + Column('timestamp', DateTime(timezone=False)), + Column('severity', String(255), index=True), + Column('reason_text', String(255)), + Column('alarm_type', String(255), 
index=True), + Column('probable_cause', String(255)), + Column('proposed_repair_action', String(255)), + Column('service_affecting', Boolean), + Column('suppression', Boolean), + Column('inhibit_alarms', Boolean), + Column('masked', Boolean), + ForeignKeyConstraint( + ['alarm_id'], + ['event_suppression.alarm_id'], + use_alter=True, + name='fk_alarm_esuppression_alarm_id' + ), + + mysql_engine=ENGINE, + mysql_charset=CHARSET, + ) + alarm.create() + + event_log = Table( + 'event_log', + meta, + Column('created_at', DateTime), + Column('updated_at', DateTime), + Column('deleted_at', DateTime), + + Column('id', Integer, primary_key=True, nullable=False), + Column('uuid', String(255), unique=True, index=True), + Column('event_log_id', String(255), index=True), + Column('state', String(255)), + Column('entity_type_id', String(255), index=True), + Column('entity_instance_id', String(255), index=True), + Column('timestamp', DateTime(timezone=False)), + Column('severity', String(255), index=True), + Column('reason_text', String(255)), + Column('event_log_type', String(255), index=True), + Column('probable_cause', String(255)), + Column('proposed_repair_action', String(255)), + Column('service_affecting', Boolean), + Column('suppression', Boolean), + Column('alarm_id', String(255), nullable=True), + ForeignKeyConstraint( + ['alarm_id'], + ['event_suppression.alarm_id'], + use_alter=True, + name='fk_elog_alarm_id_esuppression_alarm_id' + ), + + mysql_engine=ENGINE, + mysql_charset=CHARSET, + ) + event_log.create() + + +def downgrade(migrate_engine): + raise NotImplementedError('Downgrade from Initial is unsupported.') diff --git a/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/versions/__init__.py b/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/versions/__init__.py new file mode 100644 index 00000000..b98b5055 --- /dev/null +++ b/fm-rest-api/fm/fm/db/sqlalchemy/migrate_repo/versions/__init__.py @@ -0,0 +1,5 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/fm-rest-api/fm/fm/db/sqlalchemy/migration.py b/fm-rest-api/fm/fm/db/sqlalchemy/migration.py new file mode 100644 index 00000000..e996e86e --- /dev/null +++ b/fm-rest-api/fm/fm/db/sqlalchemy/migration.py @@ -0,0 +1,75 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +import os + +import sqlalchemy +from oslo_db.sqlalchemy import enginefacade + +from migrate import exceptions as versioning_exceptions +from migrate.versioning import api as versioning_api +from migrate.versioning.repository import Repository + +from fm.common import exceptions +from fm.db import migration +from fm.common.i18n import _ + +_REPOSITORY = None + +get_engine = enginefacade.get_legacy_facade().get_engine + + +def db_sync(version=None): + if version is not None: + try: + version = int(version) + except ValueError: + raise exceptions.ApiError(_("version should be an integer")) + + current_version = db_version() + repository = _find_migrate_repo() + if version is None or version > current_version: + return versioning_api.upgrade(get_engine(), repository, version) + else: + return versioning_api.downgrade(get_engine(), repository, + version) + + +def db_version(): + repository = _find_migrate_repo() + try: + return versioning_api.db_version(get_engine(), repository) + except versioning_exceptions.DatabaseNotControlledError: + meta = sqlalchemy.MetaData() + engine = get_engine() + meta.reflect(bind=engine) + tables = meta.tables + if len(tables) == 0: + db_version_control(migration.INIT_VERSION) + return versioning_api.db_version(get_engine(), repository) + else: + # Some pre-Essex DB's may not be version controlled. + # Require them to upgrade using Essex first. 
+ raise exceptions.ApiError( + _("Upgrade DB using Essex release first.")) + + +def db_version_control(version=None): + repository = _find_migrate_repo() + versioning_api.version_control(get_engine(), repository, version) + return version + + +def _find_migrate_repo(): + """Get the path for the migrate repository.""" + global _REPOSITORY + path = os.path.join(os.path.abspath(os.path.dirname(__file__)), + 'migrate_repo') + assert os.path.exists(path) + if _REPOSITORY is None: + _REPOSITORY = Repository(path) + return _REPOSITORY diff --git a/fm-rest-api/fm/fm/db/sqlalchemy/models.py b/fm-rest-api/fm/fm/db/sqlalchemy/models.py new file mode 100755 index 00000000..824b3ebc --- /dev/null +++ b/fm-rest-api/fm/fm/db/sqlalchemy/models.py @@ -0,0 +1,129 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 +# -*- encoding: utf-8 -*- +# +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+
+import json
+from six.moves.urllib import parse as urlparse
+
+from oslo_config import cfg
+
+from sqlalchemy import Column, ForeignKey, Integer, Boolean
+from sqlalchemy import String
+from sqlalchemy import DateTime
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.types import TypeDecorator, VARCHAR
+from oslo_db.sqlalchemy import models
+
+
+def table_args():
+    engine_name = urlparse.urlparse(cfg.CONF.database_connection).scheme
+    if engine_name == 'mysql':
+        return {'mysql_engine': 'InnoDB',
+                'mysql_charset': "utf8"}
+    return None
+
+
+class JSONEncodedDict(TypeDecorator):
+    """Represents an immutable structure as a json-encoded string."""
+
+    impl = VARCHAR
+
+    def process_bind_param(self, value, dialect):
+        if value is not None:
+            value = json.dumps(value)
+        return value
+
+    def process_result_value(self, value, dialect):
+        if value is not None:
+            value = json.loads(value)
+        return value
+
+
+class FmBase(models.TimestampMixin, models.ModelBase):
+
+    metadata = None
+
+    def as_dict(self):
+        d = {}
+        for c in self.__table__.columns:
+            d[c.name] = self[c.name]
+        return d
+
+
+Base = declarative_base(cls=FmBase)
+
+
+class Alarm(Base):
+    __tablename__ = 'alarm'
+
+    id = Column(Integer, primary_key=True, nullable=False)
+    uuid = Column(String(255), unique=True, index=True)
+    alarm_id = Column('alarm_id', String(255),
+                      ForeignKey('event_suppression.alarm_id'),
+                      nullable=True, index=True)
+    alarm_state = Column(String(255))
+    entity_type_id = Column(String(255), index=True)
+    entity_instance_id = Column(String(255), index=True)
+    timestamp = Column(DateTime(timezone=False))
+    severity = Column(String(255), index=True)
+    reason_text = Column(String(255))
+    alarm_type = Column(String(255), index=True)
+    probable_cause = Column(String(255))
+    proposed_repair_action = Column(String(255))
+    service_affecting = Column(Boolean, default=False)
+    suppression = Column(Boolean, default=False)
+    inhibit_alarms = Column(Boolean,
default=False) + masked = Column(Boolean, default=False) + + +class EventLog(Base): + __tablename__ = 'event_log' + + id = Column(Integer, primary_key=True, nullable=False) + uuid = Column(String(255), unique=True, index=True) + event_log_id = Column('event_log_id', String(255), + ForeignKey('event_suppression.alarm_id'), + nullable=True, index=True) + state = Column(String(255)) + entity_type_id = Column(String(255), index=True) + entity_instance_id = Column(String(255), index=True) + timestamp = Column(DateTime(timezone=False)) + severity = Column(String(255), index=True) + reason_text = Column(String(255)) + event_log_type = Column(String(255), index=True) + probable_cause = Column(String(255)) + proposed_repair_action = Column(String(255)) + service_affecting = Column(Boolean, default=False) + suppression = Column(Boolean, default=False) + + +class EventSuppression(Base): + __tablename__ = 'event_suppression' + + id = Column('id', Integer, primary_key=True, nullable=False) + uuid = Column('uuid', String(36), unique=True) + alarm_id = Column('alarm_id', String(255), unique=True) + description = Column('description', String(255)) + suppression_status = Column('suppression_status', String(255)) + set_for_deletion = Column('set_for_deletion', Boolean) + mgmt_affecting = Column('mgmt_affecting', String(255)) + degrade_affecting = Column('degrade_affecting', String(255)) diff --git a/fm-rest-api/fm/fm/objects/__init__.py b/fm-rest-api/fm/fm/objects/__init__.py new file mode 100644 index 00000000..71dafd39 --- /dev/null +++ b/fm-rest-api/fm/fm/objects/__init__.py @@ -0,0 +1,40 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +import functools + +from fm.objects import alarm +from fm.objects import event_log +from fm.objects import event_suppression + + +def objectify(klass): + """Decorator to convert database results into specified objects. 
+ :param klass: database results class + """ + + def the_decorator(fn): + @functools.wraps(fn) + def wrapper(*args, **kwargs): + result = fn(*args, **kwargs) + try: + return klass.from_db_object(result) + except TypeError: + return [klass.from_db_object(obj) for obj in result] + + return wrapper + + return the_decorator + + +alarm = alarm.Alarm +event_log = event_log.EventLog +event_suppression = event_suppression.EventSuppression + +__all__ = (alarm, + event_log, + event_suppression) diff --git a/fm-rest-api/fm/fm/objects/alarm.py b/fm-rest-api/fm/fm/objects/alarm.py new file mode 100755 index 00000000..1229c497 --- /dev/null +++ b/fm-rest-api/fm/fm/objects/alarm.py @@ -0,0 +1,69 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + +from oslo_versionedobjects import base as object_base + +from fm.db import api as db_api +from fm.objects import base +from fm.objects import utils +from fm.common import constants + + +class Alarm(base.FmObject): + + VERSION = '1.0' + + dbapi = db_api.get_instance() + + fields = { + 'id': int, + 'uuid': utils.str_or_none, + 'alarm_id': utils.str_or_none, + 'alarm_state': utils.str_or_none, + 'entity_type_id': utils.str_or_none, + 'entity_instance_id': utils.str_or_none, + 'timestamp': utils.datetime_or_str_or_none, + 'severity': utils.str_or_none, + 'reason_text': utils.str_or_none, + 'alarm_type': utils.str_or_none, + 'probable_cause': utils.str_or_none, + 'proposed_repair_action': utils.str_or_none, + 'service_affecting': utils.str_or_none, + 'suppression': utils.str_or_none, + 'inhibit_alarms': utils.str_or_none, + 'masked': utils.str_or_none, + 'suppression_status': utils.str_or_none, + 'mgmt_affecting': utils.str_or_none, + 'degrade_affecting': utils.str_or_none, + } + + @staticmethod + def _from_db_object(server, db_server): + """Converts a database entity to a formal object.""" + + if isinstance(db_server, tuple): + db_server_fields = db_server[0] + db_suppress_status = 
db_server[constants.DB_SUPPRESS_STATUS] + db_mgmt_affecting = db_server[constants.DB_MGMT_AFFECTING] + db_degrade_affecting = db_server[constants.DB_DEGRADE_AFFECTING] + db_server_fields['suppression_status'] = db_suppress_status + db_server_fields['mgmt_affecting'] = db_mgmt_affecting + db_server_fields['degrade_affecting'] = db_degrade_affecting + else: + db_server_fields = db_server + + for field in server.fields: + server[field] = db_server_fields[field] + + server.obj_reset_changes() + return server + + @object_base.remotable_classmethod + def get_by_uuid(cls, context, uuid): + return cls.dbapi.alarm_get(uuid) + + def save_changes(self, context, updates): + self.dbapi.alarm_update(self.uuid, updates) diff --git a/fm-rest-api/fm/fm/objects/base.py b/fm-rest-api/fm/fm/objects/base.py new file mode 100644 index 00000000..e85d1328 --- /dev/null +++ b/fm-rest-api/fm/fm/objects/base.py @@ -0,0 +1,87 @@ +# Copyright 2013 IBM Corp. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + +from oslo_versionedobjects import base as object_base +from oslo_versionedobjects import fields as object_fields + + +class FmObject(object_base.VersionedObject): + """Base class and object factory. + + This forms the base of all objects that can be remoted or instantiated + via RPC. Simply defining a class that inherits from this base class + will make it remotely instantiatable. 
Objects should implement the + necessary "get" classmethod routines as well as "save" object methods + as appropriate. + """ + + OBJ_SERIAL_NAMESPACE = 'fm_object' + OBJ_PROJECT_NAMESPACE = 'fm' + + fields = { + 'created_at': object_fields.DateTimeField(nullable=True), + 'updated_at': object_fields.DateTimeField(nullable=True), + } + + def __getitem__(self, name): + return getattr(self, name) + + def __setitem__(self, name, value): + setattr(self, name, value) + + def as_dict(self): + return dict((k, getattr(self, k)) + for k in self.fields + if hasattr(self, k)) + + def obj_refresh(self, loaded_object): + """Applies updates for objects that inherit from base.FmObject. + + Checks for updated attributes in an object. Updates are applied from + the loaded object column by column in comparison with the current + object. + """ + for field in self.fields: + if (self.obj_attr_is_set(field) and + self[field] != loaded_object[field]): + self[field] = loaded_object[field] + + @staticmethod + def _from_db_object(obj, db_object): + """Converts a database entity to a formal object. + + :param obj: An object of the class. + :param db_object: A DB model of the object + :return: The object of the class with the database entity added + """ + + for field in obj.fields: + obj[field] = db_object[field] + + obj.obj_reset_changes() + return obj + + @classmethod + def from_db_object(cls, db_obj): + return cls._from_db_object(cls(), db_obj) + + +class FmObjectSerializer(object_base.VersionedObjectSerializer): + # Base class to use for object hydration + OBJ_BASE_CLASS = FmObject diff --git a/fm-rest-api/fm/fm/objects/event_log.py b/fm-rest-api/fm/fm/objects/event_log.py new file mode 100644 index 00000000..2964cab9 --- /dev/null +++ b/fm-rest-api/fm/fm/objects/event_log.py @@ -0,0 +1,60 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + +from oslo_log import log +from oslo_versionedobjects import base as object_base + +from fm.db import api as db_api +from fm.objects import base +from fm.objects import utils + +LOG = log.getLogger('event_log') + + +class EventLog(base.FmObject): + + VERSION = '1.0' + + dbapi = db_api.get_instance() + + fields = { + 'id': int, + 'uuid': utils.str_or_none, + 'event_log_id': utils.str_or_none, + 'state': utils.str_or_none, + 'entity_type_id': utils.str_or_none, + 'entity_instance_id': utils.str_or_none, + 'timestamp': utils.datetime_or_str_or_none, + 'severity': utils.str_or_none, + 'reason_text': utils.str_or_none, + 'event_log_type': utils.str_or_none, + 'probable_cause': utils.str_or_none, + 'proposed_repair_action': utils.str_or_none, + 'service_affecting': utils.str_or_none, + 'suppression': utils.str_or_none, + 'suppression_status': utils.str_or_none, + } + + @staticmethod + def _from_db_object(server, db_server): + """Converts a database entity to a formal object.""" + + if isinstance(db_server, tuple): + db_server_fields = db_server[0] + db_suppress_status = db_server[1] + db_server_fields['suppression_status'] = db_suppress_status + else: + db_server_fields = db_server + + for field in server.fields: + server[field] = db_server_fields[field] + + server.obj_reset_changes() + return server + + @object_base.remotable_classmethod + def get_by_uuid(cls, context, uuid): + return cls.dbapi.event_log_get(uuid) diff --git a/fm-rest-api/fm/fm/objects/event_suppression.py b/fm-rest-api/fm/fm/objects/event_suppression.py new file mode 100644 index 00000000..19ff0921 --- /dev/null +++ b/fm-rest-api/fm/fm/objects/event_suppression.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + +from oslo_versionedobjects import base as object_base + +from fm.db import api as db_api +from fm.objects import base +from fm.objects import utils + + +class EventSuppression(base.FmObject): + + VERSION = '1.0' + + dbapi = db_api.get_instance() + + fields = { + 'id': int, + 'uuid': utils.uuid_or_none, + 'alarm_id': utils.str_or_none, + 'description': utils.str_or_none, + 'suppression_status': utils.str_or_none, + } + + @object_base.remotable_classmethod + def get_by_uuid(cls, context, uuid): + return cls.dbapi.event_suppression_get(uuid) diff --git a/fm-rest-api/fm/fm/objects/utils.py b/fm-rest-api/fm/fm/objects/utils.py new file mode 100644 index 00000000..459381a4 --- /dev/null +++ b/fm-rest-api/fm/fm/objects/utils.py @@ -0,0 +1,93 @@ +# Copyright 2013 IBM Corp. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +"""Utility methods for objects""" + +import datetime +import iso8601 +import uuid +import six + +from fm.common.i18n import _ +from fm.common import timeutils + + +def datetime_or_none(dt): + """Validate a datetime or None value.""" + if dt is None: + return None + elif isinstance(dt, datetime.datetime): + if dt.utcoffset() is None: + # NOTE(danms): Legacy objects from sqlalchemy are stored in UTC, + # but are returned without a timezone attached. + # As a transitional aid, assume a tz-naive object is in UTC. 
+            return dt.replace(tzinfo=iso8601.iso8601.Utc())
+        else:
+            return dt
+    raise ValueError('A datetime.datetime is required here')
+
+
+def datetime_or_str_or_none(val):
+    if isinstance(val, six.string_types):
+        return timeutils.parse_isotime(val)
+    return datetime_or_none(val)
+
+
+def int_or_none(val):
+    """Attempt to parse an integer value, or None."""
+    if val is None:
+        return val
+    else:
+        return int(val)
+
+
+def str_or_none(val):
+    """Attempt to stringify a value, or None."""
+    if val is None:
+        return val
+    else:
+        return six.text_type(val)
+
+
+def uuid_or_none(val):
+    """Attempt to parse a UUID value, or None."""
+    if val is None:
+        return None
+    elif isinstance(val, six.string_types):
+        return str(uuid.UUID(val.strip()))
+    raise ValueError(_('Invalid UUID value %s') % val)
+
+
+def dt_serializer(name):
+    """Return a datetime serializer for a named attribute."""
+    def serializer(self, name=name):
+        if getattr(self, name) is not None:
+            return timeutils.isotime(getattr(self, name))
+        else:
+            return None
+    return serializer
+
+
+def dt_deserializer(instance, val):
+    """A deserializer method for datetime attributes."""
+    if val is None:
+        return None
+    else:
+        return timeutils.parse_isotime(val)
diff --git a/fm-rest-api/fm/scripts/fm-api b/fm-rest-api/fm/scripts/fm-api
new file mode 100644
index 00000000..0988fc6e
--- /dev/null
+++ b/fm-rest-api/fm/scripts/fm-api
@@ -0,0 +1,147 @@
+#! /bin/sh
+#
+# Copyright (c) 2018 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+### BEGIN INIT INFO
+# Provides: fm-api
+# Required-Start: $remote_fs $syslog
+# Required-Stop: $remote_fs $syslog
+# Default-Start: 3 5
+# Default-Stop: 0 1 2 6
+# Short-Description: Fault Management REST API Service
+# Description: Fault Management REST API Service
+### END INIT INFO
+
+. 
/etc/init.d/functions + +# Linux Standard Base (LSB) Error Codes +RETVAL=0 +GENERIC_ERROR=1 +INVALID_ARGS=2 +UNSUPPORTED_FEATURE=3 +NOT_INSTALLED=5 +NOT_RUNNING=7 + +NAME="fm-api" +DAEMON="/usr/bin/${NAME}" +PIDFILE="/var/run/${NAME}.pid" +CONFIGFILE="/etc/fm/fm.conf" + +if ! [ -x ${DAEMON} ] ; then + logger "${DAEMON} is missing" + exit ${NOT_INSTALLED} +fi + +PATH=/sbin:/usr/sbin:/bin:/usr/bin:/usr/local/bin +export PATH + +status() +{ + # Status function has a standard set of return codes to indicate daemon status + # http://refspecs.linuxbase.org/LSB_3.1.0/LSB-Core-generic/LSB-Core-generic/iniscrptact.html + + local my_processes=`pgrep -l -f "^(python|/usr/bin/python|/usr/bin/python2) ${DAEMON}([^\w-]|$)"` + + if [ -z "${my_processes}" ]; then + echo "$NAME is not running" + return 1 + fi + + echo "$NAME is running" + return 0 +} + +start () +{ + status >/dev/null + if [ $? -eq 0 ]; then + echo "$NAME is already running" + return 0 + fi + + # Delete stale pidfile, if any + rm -f $PIDFILE + + start-stop-daemon --start -b --make-pidfile --pidfile $PIDFILE -x ${DAEMON} -- --config-file=${CONFIGFILE} + RETVAL=$? + if [ ${RETVAL} -eq 0 ]; then + status >/dev/null + if [ $? -eq 0 ]; then + logger -t $NAME "start OK" + echo "OK" + return 0 + fi + logger -t $NAME "start-stop-daemon returned 0, but status fails" + rm -f $PIDFILE + fi + logger -t $NAME "start failed" + return ${GENERIC_ERROR} +} + +confirm_stop() +{ + local my_processes=`pgrep -l -f "^(python|/usr/bin/python|/usr/bin/python2) ${DAEMON}([^\w-]|$)"` + + if [ -n "${my_processes}" ] + then + logger -t $NAME "About to SIGKILL the following: ${my_processes}" + pkill -KILL -f "^(python|/usr/bin/python|/usr/bin/python2) ${DAEMON}([^\w-]|$)" + fi +} + +stop () +{ + status >/dev/null + if [ $? 
-ne 0 ]; then + echo "$NAME is not running" + return 0 + fi + + echo -n "Stopping ${NAME}: " + if [ -f $PIDFILE ]; then + start-stop-daemon --stop --quiet --retry 3 --oknodo --pidfile $PIDFILE + fi + + confirm_stop + rm -f $PIDFILE + + # Confirm status + status >/dev/null + if [ $? -ne 0 ]; then + echo "Stopped" + return 0 + else + echo "Failed" + return ${GENERIC_ERROR} + fi +} + +rc=0 + +case "$1" in + start) + start + rc=$? + ;; + stop) + stop + rc=$? + ;; + restart|force-reload|reload) + stop + start + rc=$? + ;; + status) + status + rc=$? + ;; + *) + echo "Usage: $0 {start|stop|force-reload|restart|reload|status}" + exit 1 + ;; +esac + +exit $rc diff --git a/fm-rest-api/fm/scripts/fm-api.service b/fm-rest-api/fm/scripts/fm-api.service new file mode 100644 index 00000000..0256df77 --- /dev/null +++ b/fm-rest-api/fm/scripts/fm-api.service @@ -0,0 +1,15 @@ +[Unit] +Description=Fault Management REST API Service +After=nfscommon.service sw-patch.service +After=network-online.target systemd-udev-settle.service + +[Service] +Type=simple +RemainAfterExit=yes +User=root +ExecStart=/etc/rc.d/init.d/fm-api start +ExecStop=/etc/rc.d/init.d/fm-api stop +PIDFile=/var/run/fm-api.pid + +[Install] +WantedBy=multi-user.target diff --git a/fm-rest-api/fm/setup.cfg b/fm-rest-api/fm/setup.cfg new file mode 100644 index 00000000..19712048 --- /dev/null +++ b/fm-rest-api/fm/setup.cfg @@ -0,0 +1,50 @@ +[metadata] +name = fm +version = 1.0.0 +summary = CGTS Fault Management API service +classifier = + Environment :: OpenStack + Intended Audience :: Information Technology + Intended Audience :: System Administrators + License :: OSI Approved :: Apache Software License + Operating System :: POSIX :: Linux + Programming Language :: Python + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.7 + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.5 + +[global] +setup-hooks = + pbr.hooks.setup_hook + +[files] +packages = + fm + +[entry_points] 
+console_scripts = + fm-api = fm.cmd.api:main + fm-dbsync = fm.cmd.dbsync:main +fm.database.migration_backend = + sqlalchemy = oslo_db.sqlalchemy.migration + +[build_sphinx] +all_files = 1 +build-dir = doc/build +source-dir = doc/source +warning-is-error = 1 + +[egg_info] +tag_build = +tag_date = 0 +tag_svn_revision = 0 + +[extract_messages] +keywords = _ gettext ngettext l_ lazy_gettext +mapping_file = babel.cfg +output_file = fm/locale/fm.pot + +[wheel] +universal = 1 + diff --git a/fm-rest-api/fm/setup.py b/fm-rest-api/fm/setup.py new file mode 100644 index 00000000..209eeafa --- /dev/null +++ b/fm-rest-api/fm/setup.py @@ -0,0 +1,43 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT + +# In python < 2.7.4, a lazy loading of package `pbr` will break +# setuptools if some other modules registered functions in `atexit`. 
+# solution from: http://bugs.python.org/issue15881#msg170215 + +from setuptools import setup, find_packages + +setup( + name='fm', + description='Titanium Cloud Fault Management', + version='1.0.0', + license='windriver', + platforms=['any'], + provides=['fm'], + packages=find_packages(), + package_data={}, + include_package_data=False, + entry_points={ + 'fm.database.migration_backend': [ + 'sqlalchemy = oslo_db.sqlalchemy.migration', + ], + 'console_scripts': [ + 'fm-dbsync = fm.cmd.dbsync:main', + 'fm-api = fm.cmd.api:main' + ], + } +) diff --git a/python-fmclient/PKG-INFO b/python-fmclient/PKG-INFO new file mode 100644 index 00000000..7ee7800d --- /dev/null +++ b/python-fmclient/PKG-INFO @@ -0,0 +1,13 @@ +Metadata-Version: 1.1 +Name: python-fmclient +Version: 1.0 +Summary: A python client library for Fault Management +Home-page: +Author: Windriver +Author-email: info@windriver.com +License: windriver + +A python client library for Fault Management + + +Platform: UNKNOWN diff --git a/python-fmclient/centos/build_srpm.data b/python-fmclient/centos/build_srpm.data new file mode 100644 index 00000000..ac0082f8 --- /dev/null +++ b/python-fmclient/centos/build_srpm.data @@ -0,0 +1,2 @@ +SRC_DIR="fmclient" +TIS_PATCH_VER=1 diff --git a/python-fmclient/centos/python-fmclient.spec b/python-fmclient/centos/python-fmclient.spec new file mode 100644 index 00000000..cf20b01a --- /dev/null +++ b/python-fmclient/centos/python-fmclient.spec @@ -0,0 +1,76 @@ +%global pypi_name fmclient + +Summary: A python client library for Fault Management +Name: python-fmclient +Version: 1.0 +Release: %{tis_patch_ver}%{?_tis_dist} +License: windriver +Group: base +Packager: Wind River +URL: unknown +Source0: %{name}-%{version}.tar.gz + +BuildRequires: git +BuildRequires: python-pbr >= 2.0.0 +BuildRequires: python-setuptools + +Requires: python-keystoneauth1 >= 3.1.0 +Requires: python-pbr >= 2.0.0 +Requires: python-six >= 1.9.0 +Requires: python-oslo-i18n >= 2.1.0 +Requires: 
python-oslo-utils >= 3.20.0 +Requires: python-requests + +%description +A python client library for Fault Management + +%define local_bindir /usr/bin/ +%define pythonroot /usr/lib64/python2.7/site-packages + +%define debug_package %{nil} + +%package sdk +Summary: SDK files for %{name} + +%description sdk +Contains SDK files for %{name} package + +%prep +%autosetup -n %{name}-%{version} -S git + +# Remove bundled egg-info +rm -rf *.egg-info + +%build +echo "Start build" + +export PBR_VERSION=%{version} +%{__python} setup.py build + +%install +echo "Start install" +export PBR_VERSION=%{version} +%{__python} setup.py install --root=%{buildroot} \ + --install-lib=%{pythonroot} \ + --prefix=/usr \ + --install-data=/usr/share \ + --single-version-externally-managed + +# prep SDK package +mkdir -p %{buildroot}/usr/share/remote-clients +tar zcf %{buildroot}/usr/share/remote-clients/%{name}-%{version}.tgz --exclude='.gitignore' --exclude='.gitreview' -C .. %{name}-%{version} + +%clean +echo "CLEAN CALLED" +rm -rf $RPM_BUILD_ROOT + +%files +%defattr(-,root,root,-) +%doc LICENSE +%{local_bindir}/* +%{pythonroot}/%{pypi_name}/* +%{pythonroot}/%{pypi_name}-%{version}*.egg-info + +%files sdk +/usr/share/remote-clients/%{name}-%{version}.tgz + diff --git a/python-fmclient/fmclient/LICENSE b/python-fmclient/fmclient/LICENSE new file mode 100755 index 00000000..68c771a0 --- /dev/null +++ b/python-fmclient/fmclient/LICENSE @@ -0,0 +1,176 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ diff --git a/python-fmclient/fmclient/fmclient/__init__.py b/python-fmclient/fmclient/fmclient/__init__.py new file mode 100644 index 00000000..4c69ecc4 --- /dev/null +++ b/python-fmclient/fmclient/fmclient/__init__.py @@ -0,0 +1,22 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +try: + import fmclient.client + Client = fmclient.client.get_client +except ImportError: + import warnings + warnings.warn("Could not import fmclient.client", ImportWarning) + +import pbr.version + +version_info = pbr.version.VersionInfo('fmclient') + +try: + __version__ = version_info.version_string() +except AttributeError: + __version__ = None diff --git a/python-fmclient/fmclient/fmclient/client.py b/python-fmclient/fmclient/fmclient/client.py new file mode 100644 index 00000000..66aa959a --- /dev/null +++ b/python-fmclient/fmclient/fmclient/client.py @@ -0,0 +1,93 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + +from oslo_utils import importutils +from keystoneauth1 import loading + +from fmclient.common.i18n import _ +from fmclient import exc + + +SERVICE_TYPE = 'faultmanagement' + + +def get_client(version, endpoint=None, session=None, auth_token=None, + fm_url=None, username=None, password=None, auth_url=None, + project_id=None, project_name=None, + region_name=None, timeout=None, + user_domain_id=None, user_domain_name=None, + project_domain_id=None, project_domain_name=None, + service_type=SERVICE_TYPE, endpoint_type=None, + **ignored_kwargs): + """Get an authenticated client, based on the credentials.""" + kwargs = {} + interface = endpoint_type or 'publicURL' + endpoint = endpoint or fm_url + if auth_token and endpoint: + kwargs.update({ + 'token': auth_token, + }) + if timeout: + kwargs.update({ + 'timeout': timeout, + }) + elif auth_url: + auth_kwargs = {} + auth_type = 'password' + auth_kwargs.update({ + 'auth_url': auth_url, + 'project_id': project_id, + 
'project_name': project_name, + 'user_domain_id': user_domain_id, + 'user_domain_name': user_domain_name, + 'project_domain_id': project_domain_id, + 'project_domain_name': project_domain_name, + }) + if username and password: + auth_kwargs.update({ + 'username': username, + 'password': password + }) + elif auth_token: + auth_type = 'token' + auth_kwargs.update({ + 'token': auth_token, + }) + + # Create new session only if it was not passed in + if not session: + loader = loading.get_plugin_loader(auth_type) + auth_plugin = loader.load_from_options(**auth_kwargs) + session = loading.session.Session().load_from_options( + auth=auth_plugin, timeout=timeout) + + exception_msg = _('Must provide Keystone credentials or user-defined ' + 'endpoint and token') + if not endpoint: + if session: + try: + endpoint = session.get_endpoint( + service_type=service_type, + interface=interface, + region_name=region_name + ) + except Exception as e: + raise exc.AuthSystem( + _('%(message)s, error was: %(error)s') % + {'message': exception_msg, 'error': e}) + else: + # Neither session, nor valid auth parameters provided + raise exc.AuthSystem(exception_msg) + + kwargs['endpoint_override'] = endpoint + kwargs['service_type'] = service_type + kwargs['interface'] = interface + kwargs['version'] = version + + fm_module = importutils.import_versioned_module('fmclient', + version, 'client') + client_class = getattr(fm_module, 'Client') + return client_class(endpoint, session=session, **kwargs) diff --git a/python-fmclient/fmclient/fmclient/common/__init__.py b/python-fmclient/fmclient/fmclient/common/__init__.py new file mode 100644 index 00000000..b98b5055 --- /dev/null +++ b/python-fmclient/fmclient/fmclient/common/__init__.py @@ -0,0 +1,5 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/python-fmclient/fmclient/fmclient/common/base.py b/python-fmclient/fmclient/fmclient/common/base.py new file mode 100644 index 00000000..0d7f2bd7 --- /dev/null +++ b/python-fmclient/fmclient/fmclient/common/base.py @@ -0,0 +1,149 @@ +# Copyright 2013 Wind River, Inc. +# Copyright 2012 OpenStack LLC. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Base utilities to build API operation managers and objects on top of. +""" + +import copy + + +def getid(obj): + """Abstracts the common pattern of allowing both an object or an + object's ID (UUID) as a parameter when dealing with relationships. + """ + try: + return obj.id + except AttributeError: + return obj + + +class Manager(object): + """Managers interact with a particular type of API and provide CRUD + operations for them. 
+ """ + resource_class = None + + def __init__(self, api): + self.api = api + + def _create(self, url, body): + resp, body = self.post(url, body=body) + if body: + return self.resource_class(self, body) + + def _upload(self, url, body, data=None): + resp = self.api.post(url, body=body, data=data) + return resp + + def _json_get(self, url, body=None): + """send a GET request and return a json serialized object""" + resp, body = self.api.get(url, body=body) + return body + + def _format_body_data(self, body, response_key): + if response_key: + try: + data = body[response_key] + except KeyError: + return [] + else: + data = body + + if not isinstance(data, list): + data = [data] + + return data + + def _list(self, url, response_key=None, obj_class=None, body=None): + resp, body = self.api.get(url) + + if obj_class is None: + obj_class = self.resource_class + + data = self._format_body_data(body, response_key) + return [obj_class(self, res, loaded=True) for res in data if res] + + def _update(self, url, **kwargs): + resp, body = self.api.patch(url, **kwargs) + # PATCH/PUT requests may not return a body + if body: + return self.resource_class(self, body) + + def _delete(self, url): + self.api.delete(url) + + +class Resource(object): + """A resource represents a particular instance of an object (tenant, user, + etc). This is pretty much just a bag for attributes. 
+
+    :param manager: Manager object
+    :param info: dictionary representing resource attributes
+    :param loaded: prevent lazy-loading if set to True
+    """
+    def __init__(self, manager, info, loaded=False):
+        self.manager = manager
+        self._info = info
+        self._add_details(info)
+        self._loaded = loaded
+
+    def _add_details(self, info):
+        for (k, v) in info.items():
+            setattr(self, k, v)
+
+    def __getattr__(self, k):
+        if k not in self.__dict__:
+            # NOTE(bcwaldon): disallow lazy-loading if already loaded once
+            if not self.is_loaded():
+                self.get()
+                return self.__getattr__(k)
+
+            raise AttributeError(k)
+        else:
+            return self.__dict__[k]
+
+    def __repr__(self):
+        reprkeys = sorted(k for k in self.__dict__.keys() if k[0] != '_' and
+                          k != 'manager')
+        info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys)
+        return "<%s %s>" % (self.__class__.__name__, info)
+
+    def get(self):
+        # set_loaded() first ... so if we have to bail, we know we tried.
+        self.set_loaded(True)
+        if not hasattr(self.manager, 'get'):
+            return
+
+        new = self.manager.get(self.id)
+        if new:
+            self._add_details(new._info)
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        if hasattr(self, 'id') and hasattr(other, 'id'):
+            return self.id == other.id
+        return self._info == other._info
+
+    def is_loaded(self):
+        return self._loaded
+
+    def set_loaded(self, val):
+        self._loaded = val
+
+    def to_dict(self):
+        return copy.deepcopy(self._info)
diff --git a/python-fmclient/fmclient/fmclient/common/cli_no_wrap.py b/python-fmclient/fmclient/fmclient/common/cli_no_wrap.py
new file mode 100644
index 00000000..861a08c9
--- /dev/null
+++ b/python-fmclient/fmclient/fmclient/common/cli_no_wrap.py
@@ -0,0 +1,42 @@
+#
+# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + +""" +The sole purpose of this module is to manage access to the _no_wrap variable +used by the wrapping_formatters module +""" + +_no_wrap = [False] + + +def is_nowrap_set(no_wrap=None): + """ + returns True if no wrapping desired. + determines this by either the no_wrap parameter + or if the global no_wrap flag is set + :param no_wrap: + :return: + """ + global _no_wrap + if no_wrap is True: + return True + if no_wrap is False: + return False + no_wrap = _no_wrap[0] + return no_wrap + + +def set_no_wrap(no_wrap): + """ + Sets the global nowrap flag + then returns result of call to is_nowrap_set(..) + :param no_wrap: + :return: + """ + global _no_wrap + if no_wrap is not None: + _no_wrap[0] = no_wrap + return is_nowrap_set(no_wrap) diff --git a/python-fmclient/fmclient/fmclient/common/exceptions.py b/python-fmclient/fmclient/fmclient/common/exceptions.py new file mode 100644 index 00000000..4c9578c5 --- /dev/null +++ b/python-fmclient/fmclient/fmclient/common/exceptions.py @@ -0,0 +1,170 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +import inspect +import sys +import six +from six.moves import http_client +from fmclient.common.i18n import _ + + +class ClientException(Exception): + """An error occurred.""" + def __init__(self, message=None): + self.message = message + + def __str__(self): + return self.message or self.__class__.__doc__ + + +class InvalidEndpoint(ClientException): + """The provided endpoint is invalid.""" + + +class EndpointException(ClientException): + """Something is rotten in Service Catalog.""" + + +class CommunicationError(ClientException): + """Unable to communicate with server.""" + + +class Conflict(ClientException): + """HTTP 409 - Conflict. + + Indicates that the request could not be processed because of conflict + in the request, such as an edit conflict. 
+ """ + http_status = http_client.CONFLICT + message = _("Conflict") + + +# _code_map contains all the classes that have http_status attribute. +_code_map = dict( + (getattr(obj, 'http_status', None), obj) + for name, obj in vars(sys.modules[__name__]).items() + if inspect.isclass(obj) and getattr(obj, 'http_status', False) +) + + +class HttpError(ClientException): + """The base exception class for all HTTP exceptions.""" + http_status = 0 + message = _("HTTP Error") + + def __init__(self, message=None, details=None, + response=None, request_id=None, + url=None, method=None, http_status=None): + self.http_status = http_status or self.http_status + self.message = message or self.message + self.details = details + self.request_id = request_id + self.response = response + self.url = url + self.method = method + formatted_string = "%s (HTTP %s)" % (self.message, self.http_status) + if request_id: + formatted_string += " (Request-ID: %s)" % request_id + super(HttpError, self).__init__(formatted_string) + + +class HTTPRedirection(HttpError): + """HTTP Redirection.""" + message = _("HTTP Redirection") + + +class HTTPClientError(HttpError): + """Client-side HTTP error. + + Exception for cases in which the client seems to have erred. + """ + message = _("HTTP Client Error") + + +class HttpServerError(HttpError): + """Server-side HTTP error. + + Exception for cases in which the server is aware that it has + erred or is incapable of performing the request. + """ + message = _("HTTP Server Error") + + +class ServiceUnavailable(HttpServerError): + """HTTP 503 - Service Unavailable. + + The server is currently unavailable. + """ + http_status = http_client.SERVICE_UNAVAILABLE + message = _("Service Unavailable") + + +class GatewayTimeout(HttpServerError): + """HTTP 504 - Gateway Timeout. + + The server was acting as a gateway or proxy and did not receive a timely + response from the upstream server. 
+ """ + http_status = http_client.GATEWAY_TIMEOUT + message = "Gateway Timeout" + + +class HttpVersionNotSupported(HttpServerError): + """HTTP 505 - HttpVersion Not Supported. + + The server does not support the HTTP protocol version used in the request. + """ + http_status = http_client.HTTP_VERSION_NOT_SUPPORTED + message = "HTTP Version Not Supported" + + +def from_response(response, method, url=None): + """Returns an instance of :class:`HttpError` or subclass based on response. + + :param response: instance of `requests.Response` class + :param method: HTTP method used for request + :param url: URL used for request + """ + + req_id = response.headers.get("x-openstack-request-id") + kwargs = { + "http_status": response.status_code, + "response": response, + "method": method, + "url": url, + "request_id": req_id, + } + if "retry-after" in response.headers: + kwargs["retry_after"] = response.headers["retry-after"] + + content_type = response.headers.get("Content-Type", "") + if content_type.startswith("application/json"): + try: + body = response.json() + except ValueError: + pass + else: + if isinstance(body, dict): + error = body.get(list(body)[0]) + if isinstance(error, dict): + kwargs["message"] = (error.get("message") or + error.get("faultstring")) + kwargs["details"] = (error.get("details") or + six.text_type(body)) + elif content_type.startswith("text/"): + kwargs["details"] = getattr(response, 'text', '') + + try: + cls = _code_map[response.status_code] + except KeyError: + if 500 <= response.status_code < 600: + cls = HttpServerError + elif 400 <= response.status_code < 500: + cls = HTTPClientError + else: + cls = HttpError + return cls(**kwargs) diff --git a/python-fmclient/fmclient/fmclient/common/http.py b/python-fmclient/fmclient/fmclient/common/http.py new file mode 100644 index 00000000..cf9d4acd --- /dev/null +++ b/python-fmclient/fmclient/fmclient/common/http.py @@ -0,0 +1,363 @@ +# Copyright 2012 OpenStack LLC. +# All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import copy +import logging +import socket +import json +import six + +from keystoneauth1 import adapter +from keystoneauth1 import exceptions as ksa_exc + +from oslo_utils import encodeutils +from oslo_utils import importutils +from oslo_utils import netutils +import requests +import OpenSSL + + +from fmclient.common import utils +from fmclient.common import exceptions as exc + +osprofiler_web = importutils.try_import("osprofiler.web") + +LOG = logging.getLogger(__name__) + +DEFAULT_VERSION = '1' +USER_AGENT = 'python-fmclient' +CHUNKSIZE = 1024 * 64 # 64kB +REQ_ID_HEADER = 'X-OpenStack-Request-ID' + +API_VERSION = '/v1' +API_VERSION_SELECTED_STATES = ('user', 'negotiated', 'cached', 'default') + +SENSITIVE_HEADERS = ('X-Auth-Token',) + +SUPPORTED_ENDPOINT_SCHEME = ('http', 'https') + + +def encode_headers(headers): + """Encodes headers. + + Note: This should be used right before + sending anything out. 
+ + :param headers: Headers to encode + :returns: Dictionary with encoded headers' + names and values + """ + return dict((encodeutils.safe_encode(h), encodeutils.safe_encode(v)) + for h, v in headers.items() if v is not None) + + +class _BaseHTTPClient(object): + + @staticmethod + def _chunk_body(body): + chunk = body + while chunk: + chunk = body.read(CHUNKSIZE) + if not chunk: + break + yield chunk + + def _set_common_request_kwargs(self, headers, kwargs): + """Handle the common parameters used to send the request.""" + + # Default Content-Type is json + content_type = headers.get('Content-Type', 'application/json') + + # NOTE(jamielennox): remove this later. Managers should pass json= if + # they want to send json data. + data = kwargs.pop("data", None) + if data is not None and not isinstance(data, six.string_types): + try: + data = json.dumps(data) + content_type = 'application/json' + except TypeError: + # Here we assume it's + # a file-like object + # and we'll chunk it + data = self._chunk_body(data) + + headers['Content-Type'] = content_type + + return data + + def _handle_response(self, resp): + if not resp.ok: + LOG.error("Request returned failure status %s.", resp.status_code) + raise exc.from_response(resp, resp.content) + elif (resp.status_code == requests.codes.MULTIPLE_CHOICES and + resp.request.path_url != '/versions'): + # NOTE(flaper87): Eventually, we'll remove the check on `versions` + # which is a bug (1491350) on the server. + raise exc.from_response(resp) + + content_type = resp.headers.get('Content-Type') + + # Read body into string if it isn't obviously image data + if content_type == 'application/octet-stream': + # Do not read all response in memory when downloading an image. 
+ body_iter = _close_after_stream(resp, CHUNKSIZE) + else: + content = resp.text + if content_type and content_type.startswith('application/json'): + # Let's use requests json method, it should take care of + # response encoding + body_iter = resp.json() + else: + body_iter = six.StringIO(content) + try: + body_iter = json.loads(''.join([c for c in body_iter])) + except ValueError: + body_iter = None + + return resp, body_iter + + +class HTTPClient(_BaseHTTPClient): + + def __init__(self, endpoint, **kwargs): + self.endpoint = endpoint + self.identity_headers = kwargs.get('identity_headers') + self.auth_token = kwargs.get('token') + self.language_header = kwargs.get('language_header') + self.global_request_id = kwargs.get('global_request_id') + if self.identity_headers: + self.auth_token = self.identity_headers.pop('X-Auth-Token', + self.auth_token) + + self.session = requests.Session() + self.session.headers["User-Agent"] = USER_AGENT + + if self.language_header: + self.session.headers["Accept-Language"] = self.language_header + + self.timeout = float(kwargs.get('timeout', 600)) + + if self.endpoint.startswith("https"): + + if kwargs.get('insecure', False) is True: + self.session.verify = False + else: + if kwargs.get('cacert', None) is not '': + self.session.verify = kwargs.get('cacert', True) + + self.session.cert = (kwargs.get('cert_file'), + kwargs.get('key_file')) + + @staticmethod + def parse_endpoint(endpoint): + return netutils.urlsplit(endpoint) + + def log_curl_request(self, method, url, headers, data, kwargs): + curl = ['curl -g -i -X %s' % method] + + headers = copy.deepcopy(headers) + headers.update(self.session.headers) + + for (key, value) in headers.items(): + header = '-H \'%s: %s\'' % utils.safe_header(key, value) + curl.append(header) + + if not self.session.verify: + curl.append('-k') + else: + if isinstance(self.session.verify, six.string_types): + curl.append(' --cacert %s' % self.session.verify) + + if self.session.cert: + curl.append(' 
--cert %s --key %s' % self.session.cert) + + if data and isinstance(data, six.string_types): + curl.append('-d \'%s\'' % data) + + curl.append(url) + + msg = ' '.join([encodeutils.safe_decode(item, errors='ignore') + for item in curl]) + LOG.debug(msg) + + @staticmethod + def log_http_response(resp): + status = (resp.raw.version / 10.0, resp.status_code, resp.reason) + dump = ['\nHTTP/%.1f %s %s' % status] + headers = resp.headers.items() + dump.extend(['%s: %s' % utils.safe_header(k, v) for k, v in headers]) + dump.append('') + content_type = resp.headers.get('Content-Type') + + if content_type != 'application/octet-stream': + dump.extend([resp.text, '']) + LOG.debug('\n'.join([encodeutils.safe_decode(x, errors='ignore') + for x in dump])) + + def _request(self, method, url, **kwargs): + """Send an http request with the specified characteristics. + + Wrapper around httplib.HTTP(S)Connection.request to handle tasks such + as setting headers and error handling. + """ + # Copy the kwargs so we can reuse the original in case of redirects + headers = copy.deepcopy(kwargs.pop('headers', {})) + + if self.identity_headers: + for k, v in self.identity_headers.items(): + headers.setdefault(k, v) + + data = self._set_common_request_kwargs(headers, kwargs) + + # add identity header to the request + if not headers.get('X-Auth-Token'): + headers['X-Auth-Token'] = self.auth_token + + if self.global_request_id: + headers.setdefault(REQ_ID_HEADER, self.global_request_id) + + if osprofiler_web: + headers.update(osprofiler_web.get_trace_id_headers()) + + # Note(flaper87): Before letting headers / url fly, + # they should be encoded otherwise httplib will + # complain. 
+ headers = encode_headers(headers) + + if self.endpoint.endswith("/") or url.startswith("/"): + conn_url = "%s%s" % (self.endpoint, url) + else: + conn_url = "%s/%s" % (self.endpoint, url) + self.log_curl_request(method, conn_url, headers, data, kwargs) + + try: + resp = self.session.request(method, + conn_url, + data=data, + headers=headers, + **kwargs) + except requests.exceptions.Timeout as e: + message = ("Error communicating with %(url)s: %(e)s" % + dict(url=conn_url, e=e)) + raise exc.InvalidEndpoint(message=message) + except requests.exceptions.ConnectionError as e: + message = ("Error finding address for %(url)s: %(e)s" % + dict(url=conn_url, e=e)) + raise exc.CommunicationError(message=message) + except socket.gaierror as e: + message = "Error finding address for %s: %s" % ( + self.endpoint_hostname, e) + raise exc.InvalidEndpoint(message=message) + except (socket.error, socket.timeout, IOError) as e: + endpoint = self.endpoint + message = ("Error communicating with %(endpoint)s %(e)s" % + {'endpoint': endpoint, 'e': e}) + raise exc.CommunicationError(message=message) + except OpenSSL.SSL.Error as e: + message = ("SSL Error communicating with %(url)s: %(e)s" % + {'url': conn_url, 'e': e}) + raise exc.CommunicationError(message=message) + + # log request-id for each api call + request_id = resp.headers.get('x-openstack-request-id') + if request_id: + LOG.debug('%(method)s call to image for ' + '%(url)s used request id ' + '%(response_request_id)s', + {'method': resp.request.method, + 'url': resp.url, + 'response_request_id': request_id}) + + resp, body_iter = self._handle_response(resp) + self.log_http_response(resp) + return resp, body_iter + + def head(self, url, **kwargs): + return self._request('HEAD', url, **kwargs) + + def get(self, url, **kwargs): + return self._request('GET', url, **kwargs) + + def post(self, url, **kwargs): + return self._request('POST', url, **kwargs) + + def put(self, url, **kwargs): + return self._request('PUT', url, **kwargs) 
+ + def patch(self, url, **kwargs): + return self._request('PATCH', url, **kwargs) + + def delete(self, url, **kwargs): + return self._request('DELETE', url, **kwargs) + + +def _close_after_stream(response, chunk_size): + """Iterate over the content and ensure the response is closed after.""" + # Yield each chunk in the response body + for chunk in response.iter_content(chunk_size=chunk_size): + yield chunk + # Once we're done streaming the body, ensure everything is closed. + # This will return the connection to the HTTPConnectionPool in urllib3 + # and ideally reduce the number of HTTPConnectionPool full warnings. + response.close() + + +class SessionClient(adapter.Adapter, _BaseHTTPClient): + + def __init__(self, session, **kwargs): + kwargs.setdefault('user_agent', USER_AGENT) + self.global_request_id = kwargs.pop('global_request_id', None) + super(SessionClient, self).__init__(session, **kwargs) + + def request(self, url, method, **kwargs): + headers = kwargs.pop('headers', {}) + if self.global_request_id: + headers.setdefault(REQ_ID_HEADER, self.global_request_id) + + kwargs['raise_exc'] = False + data = self._set_common_request_kwargs(headers, kwargs) + try: + # NOTE(pumaranikar): To avoid bug #1641239, no modification of + # headers should be allowed after encode_headers() is called. 
+ resp = super(SessionClient, + self).request(url, + method, + headers=encode_headers(headers), + data=data, + **kwargs) + except ksa_exc.ConnectTimeout as e: + conn_url = self.get_endpoint(auth=kwargs.get('auth')) + conn_url = "%s/%s" % (conn_url.rstrip('/'), url.lstrip('/')) + message = ("Error communicating with %(url)s %(e)s" % + dict(url=conn_url, e=e)) + raise exc.InvalidEndpoint(message=message) + except ksa_exc.ConnectFailure as e: + conn_url = self.get_endpoint(auth=kwargs.get('auth')) + conn_url = "%s/%s" % (conn_url.rstrip('/'), url.lstrip('/')) + message = ("Error finding address for %(url)s: %(e)s" % + dict(url=conn_url, e=e)) + raise exc.CommunicationError(message=message) + + return self._handle_response(resp) + + +def get_http_client(endpoint=None, session=None, **kwargs): + if session: + return SessionClient(session, **kwargs) + elif endpoint: + return HTTPClient(endpoint, **kwargs) + else: + raise AttributeError('Constructing a client must contain either an ' + 'endpoint or a session') diff --git a/python-fmclient/fmclient/fmclient/common/i18n.py b/python-fmclient/fmclient/fmclient/common/i18n.py new file mode 100644 index 00000000..64e958d3 --- /dev/null +++ b/python-fmclient/fmclient/fmclient/common/i18n.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +import oslo_i18n + +_translators = oslo_i18n.TranslatorFactory(domain='fmclient') + +# The primary translation function using the well-known name "_" +_ = _translators.primary diff --git a/python-fmclient/fmclient/fmclient/common/options.py b/python-fmclient/fmclient/fmclient/common/options.py new file mode 100644 index 00000000..cea260bf --- /dev/null +++ b/python-fmclient/fmclient/fmclient/common/options.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + +import re + +from six.moves import urllib + +OP_LOOKUP = {'!=': 'ne', + '>=': 'ge', + '<=': 'le', + '>': 'gt', + '<': 'lt', + '=': 'eq'} + +OP_LOOKUP_KEYS = '|'.join(sorted(OP_LOOKUP.keys(), key=len, reverse=True)) +OP_SPLIT_RE = re.compile(r'(%s)' % OP_LOOKUP_KEYS) + +DATA_TYPE_RE = re.compile(r'^(string|integer|float|datetime|boolean)(::)(.+)$') + + +def build_url(path, q, params=None): + """Convert list of dicts and a list of params to query url format. + + This will convert the following: + "[{field=this,op=le,value=34}, + {field=that,op=eq,value=foo,type=string}], + ['foo=bar','sna=fu']" + to: + "?q.field=this&q.field=that& + q.op=le&q.op=eq& + q.type=&q.type=string& + q.value=34&q.value=foo& + foo=bar&sna=fu" + """ + if q: + query_params = {'q.field': [], + 'q.value': [], + 'q.op': [], + 'q.type': []} + + for query in q: + for name in ['field', 'op', 'value', 'type']: + query_params['q.%s' % name].append(query.get(name, '')) + + # Transform the dict to a sequence of two-element tuples in fixed + # order, then the encoded string will be consistent in Python 2&3. + new_qparams = sorted(query_params.items(), key=lambda x: x[0]) + path += "?" + urllib.parse.urlencode(new_qparams, doseq=True) + + if params: + for p in params: + path += '&%s' % p + elif params: + path += '?%s' % params[0] + for p in params[1:]: + path += '&%s' % p + return path + + +def cli_to_array(cli_query): + """Convert CLI list of queries to the Python API format. 
+ + This will convert the following: + "this<=34;that=string::foo" + to + "[{field=this,op=le,value=34,type=''}, + {field=that,op=eq,value=foo,type=string}]" + + """ + + if cli_query is None: + return None + + def split_by_op(query): + """Split a single query string to field, operator, value.""" + + def _value_error(message): + raise ValueError('invalid query %(query)s: missing %(message)s' % + {'query': query, 'message': message}) + + try: + field, operator, value = OP_SPLIT_RE.split(query, maxsplit=1) + except ValueError: + _value_error('operator') + + if not len(field): + _value_error('field') + + if not len(value): + _value_error('value') + + return field.strip(), operator, value.strip() + + def split_by_data_type(query_value): + frags = DATA_TYPE_RE.match(query_value) + + # The second match is the separator. Return a list without it if + # a type identifier was found. + return frags.group(1, 3) if frags else None + + opts = [] + queries = cli_query.split(';') + for q in queries: + query = split_by_op(q) + opt = {} + opt['field'] = query[0] + opt['op'] = OP_LOOKUP[query[1]] + + # Allow the data type of the value to be specified via ::, + # where type can be one of integer, string, float, datetime, boolean + value_frags = split_by_data_type(query[2]) + if not value_frags: + opt['value'] = query[2] + opt['type'] = '' + else: + opt['type'] = value_frags[0] + opt['value'] = value_frags[1] + opts.append(opt) + return opts diff --git a/python-fmclient/fmclient/fmclient/common/utils.py b/python-fmclient/fmclient/fmclient/common/utils.py new file mode 100644 index 00000000..2521f144 --- /dev/null +++ b/python-fmclient/fmclient/fmclient/common/utils.py @@ -0,0 +1,578 @@ +# Copyright 2012 OpenStack Foundation +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2018 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +from __future__ import print_function + +import hashlib + +import re +import six.moves.urllib.parse as urlparse +import six +import os +import copy +import argparse +import dateutil +import prettytable +import textwrap + +from datetime import datetime +from dateutil import parser + +from prettytable import ALL +from prettytable import FRAME +from prettytable import NONE + +import wrapping_formatters + + +SENSITIVE_HEADERS = ('X-Auth-Token', ) + + +class HelpFormatter(argparse.HelpFormatter): + def start_section(self, heading): + # Title-case the headings + heading = '%s%s' % (heading[0].upper(), heading[1:]) + super(HelpFormatter, self).start_section(heading) + + +def safe_header(name, value): + if value is not None and name in SENSITIVE_HEADERS: + h = hashlib.sha1(value) + d = h.hexdigest() + return name, "{SHA1}%s" % d + else: + return name, value + + +def strip_version(endpoint): + if not isinstance(endpoint, six.string_types): + raise ValueError("Expected endpoint") + version = None + # Get rid of trailing '/' if present + endpoint = endpoint.rstrip('/') + url_parts = urlparse.urlparse(endpoint) + (scheme, netloc, path, __, __, __) = url_parts + path = path.lstrip('/') + # regex to match 'v1' or 'v2.0' etc + if re.match('v\d+\.?\d*', path): + version = float(path.lstrip('v')) + endpoint = scheme + '://' + netloc + return endpoint, version + + +def endpoint_version_from_url(endpoint, default_version=None): + if endpoint: + endpoint, version = 
strip_version(endpoint) + return endpoint, version or default_version + else: + return None, default_version + + +def env(*vars, **kwargs): + """Search for the first defined of possibly many env vars + + Returns the first environment variable defined in vars, or + returns the default defined in kwargs. + """ + for v in vars: + value = os.environ.get(v, None) + if value: + return value + return kwargs.get('default', '') + + +def _wrapping_formatter_callback_decorator(subparser, command, callback): + """ + - Adds the --nowrap option to a CLI command. + This option, when on, deactivates word wrapping. + - Decorates the command's callback function in order to process + the nowrap flag + + :param subparser: + :return: decorated callback + """ + + try: + subparser.add_argument('--nowrap', action='store_true', + help='No wordwrapping of output') + except Exception: + # exception happens when nowrap option already configured + # for command - so get out with callback undecorated + return callback + + def no_wrap_decorator_builder(callback): + + def process_callback_with_no_wrap(cc, args={}): + no_wrap = args.nowrap + # turn on/off wrapping formatters when outputting CLI results + wrapping_formatters.set_no_wrap(no_wrap) + return callback(cc, args=args) + + return process_callback_with_no_wrap + + decorated_callback = no_wrap_decorator_builder(callback) + return decorated_callback + + +def _does_command_need_no_wrap(callback): + if callback.__name__.startswith("do_") and \ + callback.__name__.endswith("_list"): + return True + + if callback.__name__ in \ + ['donot_config_ntp_list', + 'donot_config_ptp_list', + 'do_host_apply_memprofile', + 'do_host_apply_cpuprofile', + 'do_host_apply_ifprofile', + 'do_host_apply_profile', + 'do_host_apply_storprofile', + 'donot_config_oam_list', + 'donot_dns_list', + 'do_host_cpu_modify', + 'do_event_suppress', + 'do_event_unsuppress', + 'do_event_unsuppress_all']: + return True + return False + + +def get_terminal_size(): + """Returns a 
tuple (x, y) representing the width(x) and the height(x) + in characters of the terminal window. + """ + + def ioctl_GWINSZ(fd): + try: + import fcntl + import struct + import termios + cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, + '1234')) + except Exception: + return None + if cr == (0, 0): + return None + if cr == (0, 0): + return None + return cr + + cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) + if not cr: + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + cr = ioctl_GWINSZ(fd) + os.close(fd) + except Exception: + pass + if not cr: + cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) + return int(cr[1]), int(cr[0]) + + +def normalize_field_data(obj, fields): + for f in fields: + if hasattr(obj, f): + data = getattr(obj, f, '') + try: + data = str(data) + except UnicodeEncodeError: + setattr(obj, f, data.encode('utf-8')) + + +# Decorator for cli-args +def arg(*args, **kwargs): + def _decorator(func): + # Because of the sematics of decorator composition if we just append + # to the options list positional options will appear to be backwards. + func.__dict__.setdefault('arguments', []).insert(0, (args, kwargs)) + return func + + return _decorator + + +def define_command(subparsers, command, callback, cmd_mapper): + '''Define a command in the subparsers collection. + + :param subparsers: subparsers collection where the command will go + :param command: command name + :param callback: function that will be used to process the command + ''' + desc = callback.__doc__ or '' + help = desc.strip().split('\n')[0] + arguments = getattr(callback, 'arguments', []) + + subparser = subparsers.add_parser(command, help=help, + description=desc, + add_help=False, + formatter_class=HelpFormatter) + subparser.add_argument('-h', '--help', action='help', + help=argparse.SUPPRESS) + + # Are we a list command? 
+ if _does_command_need_no_wrap(callback): + # then decorate it with wrapping data formatter functionality + func = _wrapping_formatter_callback_decorator(subparser, command, callback) + else: + func = callback + + cmd_mapper[command] = subparser + for (args, kwargs) in arguments: + subparser.add_argument(*args, **kwargs) + subparser.set_defaults(func=func) + + +def define_commands_from_module(subparsers, command_module, cmd_mapper): + '''Find all methods beginning with 'do_' in a module, and add them + as commands into a subparsers collection. + ''' + for method_name in (a for a in dir(command_module) if a.startswith('do_')): + # Commands should be hypen-separated instead of underscores. + command = method_name[3:].replace('_', '-') + callback = getattr(command_module, method_name) + define_command(subparsers, command, callback, cmd_mapper) + + +def parse_date(string_data): + """Parses a date-like input string into a timezone aware Python + datetime. + """ + + if not isinstance(string_data, six.string_types): + return string_data + + pattern = r'(\d{4}-\d{2}-\d{2}[T ])?\d{2}:\d{2}:\d{2}(\.\d{6})?Z?' 
+ + def convert_date(matchobj): + formats = ["%Y-%m-%dT%H:%M:%S.%f", "%Y-%m-%d %H:%M:%S.%f", + "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S", + "%Y-%m-%dT%H:%M:%SZ"] + datestring = matchobj.group(0) + if datestring: + for format in formats: + try: + datetime.strptime(datestring, format) + datestring += "+0000" + parsed = parser.parse(datestring) + converted = parsed.astimezone(dateutil.tz.tzlocal()) + converted = datetime.strftime(converted, format) + return converted + except Exception: + pass + return datestring + + return re.sub(pattern, convert_date, string_data) + + +def _sort_for_list(objs, fields, formatters={}, sortby=0, reversesort=False): + + # Sort only if necessary + if sortby is None: + return objs + + sort_field = fields[sortby] + # figure out sort key function + if sort_field in formatters: + field_formatter = formatters[sort_field] + if wrapping_formatters.WrapperFormatter.is_wrapper_formatter( + field_formatter): + def sort_key(x): + return field_formatter.wrapper_formatter.get_unwrapped_field_value(x) + else: + def sort_key(x): + return field_formatter(x) + else: + def sort_key(x): + return getattr(x, sort_field, '') + + objs.sort(reverse=reversesort, key=sort_key) + + return objs + + +def str_height(text): + if not text: + return 1 + lines = str(text).split("\n") + height = len(lines) + return height + + +def row_height(texts): + if not texts or len(texts) == 0: + return 1 + height = max(str_height(text) for text in texts) + return height + + +class WRPrettyTable(prettytable.PrettyTable): + """A PrettyTable that allows word wrapping of its headers.""" + + def __init__(self, field_names=None, **kwargs): + super(WRPrettyTable, self).__init__(field_names, **kwargs) + + def _stringify_header(self, options): + """ + This overridden version of _stringify_header can wrap its + header data. It leverages the functionality in _stringify_row + to perform this task. 
+ :returns string of header, including border text + """ + bits = [] + if options["border"]: + if options["hrules"] in (ALL, FRAME): + bits.append(self._hrule) + bits.append("\n") + # For tables with no data or field names + if not self._field_names: + if options["vrules"] in (ALL, FRAME): + bits.append(options["vertical_char"]) + bits.append(options["vertical_char"]) + else: + bits.append(" ") + bits.append(" ") + + header_row_data = [] + for field in self._field_names: + if options["fields"] and field not in options["fields"]: + continue + if self._header_style == "cap": + fieldname = field.capitalize() + elif self._header_style == "title": + fieldname = field.title() + elif self._header_style == "upper": + fieldname = field.upper() + elif self._header_style == "lower": + fieldname = field.lower() + else: + fieldname = field + header_row_data.append(fieldname) + + # output actual header row data, word wrap when necessary + bits.append(self._stringify_row(header_row_data, options)) + + if options["border"] and options["hrules"] != NONE: + bits.append("\n") + bits.append(self._hrule) + + return "".join(bits) + + +def prettytable_builder(field_names=None, **kwargs): + return WRPrettyTable(field_names, **kwargs) + + +def wordwrap_header(field, field_label, formatter): + """ + Given a field label (the header text for one column) and the word wrapping formatter for a column, + this function asks the formatter for the desired column width and then + performs a wordwrap of field_label + + :param field: the field name associated with the field_label + :param field_label: field_label to word wrap + :param formatter: the field formatter + :return: word wrapped field_label + """ + if wrapping_formatters.is_nowrap_set(): + return field_label + + if not wrapping_formatters.WrapperFormatter.is_wrapper_formatter(formatter): + return field_label + # go to the column's formatter and ask it what the width should be + wrapper_formatter = formatter.wrapper_formatter + actual_width = 
wrapper_formatter.get_actual_column_char_len(wrapper_formatter.get_calculated_desired_width()) + # now word wrap based on column width + wrapped_header = textwrap.fill(field_label, actual_width) + return wrapped_header + + +def default_printer(s): + print(s) + + +def pt_builder(field_labels, fields, formatters, paging, printer=default_printer): + """ + returns an object that 'fronts' a prettyTable object + that can handle paging as well as automatically falling back + to not word wrapping when word wrapping does not cause the + output to fit the terminal width. + """ + + class PT_Builder(object): + + def __init__(self, field_labels, fields, formatters, no_paging): + self.objs_in_pt = [] + self.unwrapped_field_labels = field_labels + self.fields = fields + self.formatters = formatters + self.header_height = 0 + self.terminal_width, self.terminal_height = get_terminal_size() + self.terminal_lines_left = self.terminal_height + self.paging = not no_paging + self.paged_rows_added = 0 + self.pt = None + self.quit = False + + def add_row(self, obj): + if self.quit: + return False + if not self.pt: + self.build_pretty_table() + return self._row_add(obj) + + def __add_row_and_obj(self, row, obj): + self.pt.add_row(row) + self.objs_in_pt.append(obj) + + def _row_add(self, obj): + + row = _build_row_from_object(self.fields, self.formatters, obj) + + if not paging: + self.__add_row_and_obj(row, obj) + return True + + rheight = row_height(row) + if (self.terminal_lines_left - rheight) >= 0 or self.paged_rows_added == 0: + self.__add_row_and_obj(row, obj) + self.terminal_lines_left -= rheight + else: + printer(self.get_string()) + if self.terminal_lines_left > 0: + printer("\n" * (self.terminal_lines_left - 1)) + + s = six.moves.input("Press Enter to continue or 'q' to exit...") + if s == 'q': + self.quit = True + return False + self.terminal_lines_left = self.terminal_height - self.header_height + self.build_pretty_table() + self.__add_row_and_obj(row, obj) + 
self.terminal_lines_left -= rheight + self.paged_rows_added += 1 + + def get_string(self): + if not self.pt: + self.build_pretty_table() + objs = copy.copy(self.objs_in_pt) + self.objs_in_pt = [] + output = self.pt.get_string() + if wrapping_formatters.is_nowrap_set(): + return output + output_width = wrapping_formatters.get_width(output) + if output_width <= self.terminal_width: + return output + # At this point pretty Table (self.pt) does not fit the terminal width so let's + # temporarily turn wrapping off, rebuild the pretty Table with the data unwrapped. + orig_no_wrap_settings = wrapping_formatters.set_no_wrap_on_formatters(True, self.formatters) + self.build_pretty_table() + for o in objs: + self.add_row(o) + wrapping_formatters.unset_no_wrap_on_formatters(orig_no_wrap_settings) + return self.pt.get_string() + + def build_pretty_table(self): + field_labels = [wordwrap_header(field, field_label, formatter) + for field, field_label, formatter in + zip(self.fields, self.unwrapped_field_labels, [formatters.get(f, None) + for f in self.fields])] + self.pt = prettytable_builder(field_labels, caching=False, print_empty=False) + self.pt.align = 'l' + # 2 header border lines + 1 bottom border + 1 prompt + header data height + self.header_height = 2 + 1 + 1 + row_height(field_labels) + self.terminal_lines_left = self.terminal_height - self.header_height + return self.pt + + def done(self): + if self.quit: + return + + if not self.paging or (self.terminal_lines_left < self.terminal_height - self.header_height): + printer(self.get_string()) + + return PT_Builder(field_labels, fields, formatters, not paging) + + +def print_long_list(objs, fields, field_labels, formatters={}, sortby=0, reversesort=False, no_wrap_fields=[], + no_paging=False, printer=default_printer): + + formatters = wrapping_formatters.as_wrapping_formatters(objs, fields, field_labels, formatters, + no_wrap_fields=no_wrap_fields) + + objs = _sort_for_list(objs, fields, formatters=formatters, 
sortby=sortby, reversesort=reversesort) + + pt = pt_builder(field_labels, fields, formatters, not no_paging, printer=printer) + + for o in objs: + pt.add_row(o) + + pt.done() + + +def print_dict(d, dict_property="Property", wrap=0): + pt = prettytable.PrettyTable([dict_property, 'Value'], + caching=False, print_empty=False) + pt.align = 'l' + for k, v in sorted(d.iteritems()): + v = parse_date(v) + # convert dict to str to check length + if isinstance(v, dict): + v = str(v) + if wrap > 0: + v = textwrap.fill(six.text_type(v), wrap) + # if value has a newline, add in multiple rows + # e.g. fault with stacktrace + if v and isinstance(v, str) and r'\n' in v: + lines = v.strip().split(r'\n') + col1 = k + for line in lines: + pt.add_row([col1, line]) + col1 = '' + else: + pt.add_row([k, v]) + + print(pt.get_string()) + + +def _build_row_from_object(fields, formatters, o): + """ + takes an object o and converts to an array of values + compatible with the input for prettyTable.add_row(row) + """ + row = [] + for field in fields: + if field in formatters: + data = parse_date(getattr(o, field, '')) + setattr(o, field, data) + data = formatters[field](o) + row.append(data) + else: + data = parse_date(getattr(o, field, '')) + row.append(data) + return row + + +def print_list(objs, fields, field_labels, formatters={}, sortby=0, + reversesort=False, no_wrap_fields=[], printer=default_printer): + # print_list() is the same as print_long_list() with paging turned off + return print_long_list(objs, fields, field_labels, formatters=formatters, sortby=sortby, + reversesort=reversesort, no_wrap_fields=no_wrap_fields, + no_paging=True, printer=printer) diff --git a/python-fmclient/fmclient/fmclient/common/wrapping_formatters.py b/python-fmclient/fmclient/fmclient/common/wrapping_formatters.py new file mode 100644 index 00000000..874bc46c --- /dev/null +++ b/python-fmclient/fmclient/fmclient/common/wrapping_formatters.py @@ -0,0 +1,807 @@ +# +# Copyright (c) 2018 Wind River Systems, 
Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# + +""" +Manages WrapperFormatter objects. + +WrapperFormatter objects can be used for wrapping CLI column celldata in order +for the CLI table (using prettyTable) to fit the terminal screen + +The basic idea is: + + Once celldata is retrieved and ready to display, first iterate through the celldata + and word wrap it so that fits programmer desired column widths. The + WrapperFormatter objects fill this role. + + Once the celldata is formatted to their desired widths, then it can be passed to + the existing prettyTable code base for rendering. + +""" +import copy +import re +import six +import textwrap + +from cli_no_wrap import is_nowrap_set +from cli_no_wrap import set_no_wrap +from prettytable import _get_size + +UUID_MIN_LENGTH = 36 + +# monkey patch (customize) how the textwrap module breaks text into chunks +wordsep_re = re.compile(r'(\s+|' # any whitespace + r',|' + r'=|' + r'\.|' + r':|' + r'[^\s\w]*\w+[^0-9\W]-(?=\w+[^0-9\W])|' # hyphenated words + r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash + +textwrap.TextWrapper.wordsep_re = wordsep_re + + +def get_width(value): + if value is None: + return 0 + + return _get_size(six.text_type(value))[0] # get width from [width,height] + + +def _get_terminal_width(): + from utils import get_terminal_size + result = get_terminal_size()[0] + return result + + +def is_uuid_field(field_name): + """ + :param field_name: + :return: True if field_name looks like a uuid name + """ + if field_name is not None and field_name in ["uuid", "UUID"] or field_name.endswith("uuid"): + return True + return False + + +class WrapperContext(object): + """Context for the wrapper formatters + + Maintains a list of the current WrapperFormatters + being used to format the prettyTable celldata + + Allows wrappers access to its 'sibling' wrappers + contains convenience methods and attributes + for calculating current tableWidth. 
+ """ + + def __init__(self): + self.wrappers = [] + self.wrappers_by_field = {} + self.non_data_chrs_used_by_table = 0 + self.num_columns = 0 + self.terminal_width = -1 + + def set_num_columns(self, num_columns): + self.num_columns = num_columns + self.non_data_chrs_used_by_table = (num_columns * 3) + 1 + + def add_column_formatter(self, field, wrapper): + self.wrappers.append(wrapper) + self.wrappers_by_field[field] = wrapper + + def get_terminal_width(self): + if self.terminal_width == -1: + self.terminal_width = _get_terminal_width() + return self.terminal_width + + def get_table_width(self): + """ + Calculates table width by looping through all + column formatters and summing up their widths + :return: total table width + """ + widths = [w.get_actual_column_char_len(w.get_calculated_desired_width(), check_remaining_row_chars=False) for w + in + self.wrappers] + chars_used_by_data = sum(widths) + width = self.non_data_chrs_used_by_table + chars_used_by_data + return width + + def is_table_too_wide(self): + """ + :return: True if calculated table width is too wide for the terminal width + """ + if self.get_terminal_width() < self.get_table_width(): + return True + return False + + +def field_value_function_factory(formatter, field): + """Builds function for getting a field value from table cell celldata + As a side-effect, attaches function as the 'get_field_value' attribute + of the formatter + :param formatter:the formatter to attach return function to + :param field: + :return: function that returns cell celldata + """ + + def field_value_function_builder(data): + if isinstance(data, dict): + formatter.get_field_value = lambda celldata: celldata.get(field, None) + else: + formatter.get_field_value = lambda celldata: getattr(celldata, field) + return formatter.get_field_value(data) + + return field_value_function_builder + + +class WrapperFormatter(object): + """Base (abstract) class definition of wrapping formatters""" + + def __init__(self, ctx, field): + 
self.ctx = ctx + self.add_blank_line = False + self.no_wrap = False + self.min_width = 0 + self.field = field + self.header_width = 0 + self.actual_column_char_len = -1 + self.textWrapper = None + + if self.field: + self.get_field_value = field_value_function_factory(self, field) + else: + self.get_field_value = lambda data: data + + def get_basic_desired_width(self): + return self.min_width + + def get_calculated_desired_width(self): + basic_desired_width = self.get_basic_desired_width() + if self.header_width > basic_desired_width: + return self.header_width + return basic_desired_width + + def get_sibling_wrappers(self): + """ + :return: a list of your sibling wrappers for the other fields + """ + others = [w for w in self.ctx.wrappers if w != self] + return others + + def get_remaining_row_chars(self): + used = [w.get_actual_column_char_len(w.get_calculated_desired_width(), + check_remaining_row_chars=False) + for w in self.get_sibling_wrappers()] + chrs_used_by_data = sum(used) + remaining_chrs_in_row = (self.ctx.get_terminal_width() - + self.ctx.non_data_chrs_used_by_table) - chrs_used_by_data + return remaining_chrs_in_row + + def set_min_width(self, min_width): + self.min_width = min_width + + def set_actual_column_len(self, actual): + self.actual_column_char_len = actual + + def get_actual_column_char_len(self, desired_char_len, check_remaining_row_chars=True): + """Utility method to adjust desired width to a width + that can actually be applied based on current table width + and current terminal width + + Will not allow actual width to be less than min_width + min_width is typically length of the column header text + or the longest 'word' in the celldata + + :param desired_char_len: + :param check_remaining_row_chars: + :return: + """ + if self.actual_column_char_len != -1: + return self.actual_column_char_len # already calculated + if desired_char_len < self.min_width: + actual = self.min_width + else: + actual = desired_char_len + if 
check_remaining_row_chars and actual > self.min_width: + remaining = self.get_remaining_row_chars() + if actual > remaining >= self.min_width: + actual = remaining + if check_remaining_row_chars: + self.set_actual_column_len(actual) + if self.ctx.is_table_too_wide(): + # Table too big can I shrink myself? + if actual > self.min_width: + # shrink column + while actual > self.min_width: + actual -= 1 # TODO(jkung): fix in next sprint + # each column needs to share in + # table shrinking - but this is good + # enough for now - also - why the loop? + self.set_actual_column_len(actual) + + return actual + + def _textwrap_fill(self, s, actual_width): + if not self.textWrapper: + self.textWrapper = textwrap.TextWrapper(actual_width) + else: + self.textWrapper.width = actual_width + return self.textWrapper.fill(s) + + def text_wrap(self, s, width): + """ + performs actual text wrap + :param s: + :param width: in characters + :return: formatted text + """ + if self.no_wrap: + return s + actual_width = self.get_actual_column_char_len(width) + new_s = self._textwrap_fill(s, actual_width) + wrapped = new_s != s + if self.add_blank_line and wrapped: + new_s += "\n".ljust(actual_width) + return new_s + + def format(self, data): + return str(self.get_field_value(data)) + + def get_unwrapped_field_value(self, data): + return self.get_field_value(data) + + def as_function(self): + def foo(data): + return self.format(data) + + foo.WrapperFormatterMarker = True + foo.wrapper_formatter = self + return foo + + @staticmethod + def is_wrapper_formatter(foo): + if not foo: + return False + return getattr(foo, "WrapperFormatterMarker", False) + + +class WrapperLambdaFormatter(WrapperFormatter): + """A wrapper formatter that adapts a function (callable) + to look like a WrapperFormatter + """ + + def __init__(self, ctx, field, format_function): + super(WrapperLambdaFormatter, self).__init__(ctx, field) + self.format_function = format_function + + def format(self, data): + return 
self.format_function(self.get_field_value(data)) + + +class WrapperFixedWidthFormatter(WrapperLambdaFormatter): + """A wrapper formatter that forces the text to wrap within + a specific width (in chars) + """ + + def __init__(self, ctx, field, width): + super(WrapperFixedWidthFormatter, self).__init__(ctx, field, + lambda data: + self.text_wrap(str(data), + self.get_calculated_desired_width())) + self.width = width + + def get_basic_desired_width(self): + return self.width + + +class WrapperPercentWidthFormatter(WrapperFormatter): + """A wrapper formatter that forces the text to wrap within + a specific percentage width of the current terminal width + """ + + def __init__(self, ctx, field, width_as_decimal): + super(WrapperPercentWidthFormatter, self).__init__(ctx, field) + self.width_as_decimal = width_as_decimal + + def get_basic_desired_width(self): + width = int((self.ctx.get_terminal_width() - self.ctx.non_data_chrs_used_by_table) * + self.width_as_decimal) + return width + + def format(self, data): + width = self.get_calculated_desired_width() + field_value = self.get_field_value(data) + return self.text_wrap(str(field_value), width) + + +class WrapperWithCustomFormatter(WrapperLambdaFormatter): + """A wrapper formatter that allows the programmer to have a custom + formatter (in the form of a function) that is first applied + and then a wrapper function is applied to the result + + See wrapperFormatterFactory for a better explanation! 
:-) + """ + + # noinspection PyUnusedLocal + def __init__(self, ctx, field, custom_formatter, wrapper_formatter): + super(WrapperWithCustomFormatter, self).__init__(ctx, None, + lambda data: wrapper_formatter.format(custom_formatter(data))) + self.wrapper_formatter = wrapper_formatter + self.custom_formatter = custom_formatter + + def get_unwrapped_field_value(self, data): + return self.custom_formatter(data) + + def __setattr__(self, name, value): + # + # Some attributes set onto this class need + # to be pushed down to the 'inner' wrapper_formatter + # + super(WrapperWithCustomFormatter, self).__setattr__(name, value) + if hasattr(self, "wrapper_formatter"): + if name == "no_wrap": + self.wrapper_formatter.no_wrap = value + if name == "add_blank_line": + self.wrapper_formatter.add_blank_line = value + if name == "header_width": + self.wrapper_formatter.header_width = value + + def set_min_width(self, min_width): + super(WrapperWithCustomFormatter, self).set_min_width(min_width) + self.wrapper_formatter.set_min_width(min_width) + + def set_actual_column_len(self, actual): + super(WrapperWithCustomFormatter, self).set_actual_column_len(actual) + self.wrapper_formatter.set_actual_column_len(actual) + + def get_basic_desired_width(self): + return self.wrapper_formatter.get_basic_desired_width() + + +def wrapper_formatter_factory(ctx, field, formatter): + """ + This function is a factory for building WrapperFormatter objects. + + The function needs to be called for each celldata column (field) + that will be displayed in the prettyTable. + + The function looks at the formatter parameter and based on its type, + determines what WrapperFormatter to construct per field (column). 
+ + ex: + + formatter = 15 - type = int : Builds a WrapperFixedWidthFormatter that + will wrap at 15 chars + + formatter = .25 - type = int : Builds a WrapperPercentWidthFormatter that + will wrap at 25% terminal width + + formatter = type = callable : Builds a WrapperLambdaFormatter that + will call some arbitrary function + + formatter = type = dict : Builds a WrapperWithCustomFormatter that + will call some arbitrary function to format + and then apply a wrapping formatter to the result + + ex: this dict {"formatter" : captializeFunction,, + "wrapperFormatter": .12} + will apply the captializeFunction to the column + celldata and then wordwrap at 12 % of terminal width + + :param ctx: the WrapperContext that the built WrapperFormatter will use + :param field: name of field (column_ that the WrapperFormatter will execute on + :param formatter: specifies type and input for WrapperFormatter that will be built + :return: WrapperFormatter + + """ + if isinstance(formatter, WrapperFormatter): + return formatter + if callable(formatter): + return WrapperLambdaFormatter(ctx, field, formatter) + if isinstance(formatter, int): + return WrapperFixedWidthFormatter(ctx, field, formatter) + if isinstance(formatter, float): + return WrapperPercentWidthFormatter(ctx, field, formatter) + if isinstance(formatter, dict): + if "wrapperFormatter" in formatter: + embedded_wrapper_formatter = wrapper_formatter_factory(ctx, None, + formatter["wrapperFormatter"]) + elif "hard_width" in formatter: + embedded_wrapper_formatter = WrapperFixedWidthFormatter(ctx, field, formatter["hard_width"]) + embedded_wrapper_formatter.min_width = formatter["hard_width"] + else: + embedded_wrapper_formatter = WrapperFormatter(ctx, None) # effectively a NOOP width formatter + if "formatter" not in formatter: + return embedded_wrapper_formatter + custom_formatter = formatter["formatter"] + wrapper = WrapperWithCustomFormatter(ctx, field, custom_formatter, embedded_wrapper_formatter) + return wrapper + + 
raise Exception("Formatter Error! Unrecognized formatter {} for field {}".format(formatter, field)) + + +def build_column_stats_for_best_guess_formatting(objs, fields, field_labels, custom_formatters={}): + class ColumnStats: + def __init__(self, field, field_label, custom_formatter=None): + self.field = field + self.field_label = field_label + self.average_width = 0 + self.min_width = get_width(field_label) if field_label else 0 + self.max_width = get_width(field_label) if field_label else 0 + self.total_width = 0 + self.count = 0 + self.average_percent = 0 + self.max_percent = 0 + self.isUUID = is_uuid_field(field) + if custom_formatter: + self.get_field_value = custom_formatter + else: + self.get_field_value = field_value_function_factory(self, field) + + def add_value(self, value): + if self.isUUID: + return + self.count += 1 + value_width = get_width(value) + self.total_width = self.total_width + value_width + if value_width < self.min_width: + self.min_width = value_width + if value_width > self.max_width: + self.max_width = value_width + if self.count > 0: + self.average_width = float(self.total_width) / float(self.count) + + def set_max_percent(self, max_total_width): + if max_total_width > 0: + self.max_percent = float(self.max_width) / float(max_total_width) + + def set_avg_percent(self, avg_total_width): + if avg_total_width > 0: + self.average_percent = float(self.average_width) / float(avg_total_width) + + def __str__(self): + return str([self.field, + self.average_width, + self.min_width, + self.max_width, + self.total_width, + self.count, + self.average_percent, + self.max_percent, + self.isUUID]) + + def __repr__(self): + return str([self.field, + self.average_width, + self.min_width, + self.max_width, + self.total_width, + self.count, + self.average_percent, + self.max_percent, + self.isUUID]) + + if objs is None or len(objs) == 0: + return {"stats": {}, + "total_max_width": 0, + "total_avg_width": 0} + + stats = {} + for i in range(0, 
len(fields)): + stats[fields[i]] = ColumnStats(fields[i], field_labels[i], custom_formatters.get(fields[i])) + + for obj in objs: + for field in fields: + column_stat = stats[field] + column_stat.add_value(column_stat.get_field_value(obj)) + + total_max_width = sum([s.max_width for s in stats.values()]) + total_avg_width = sum([s.average_width for s in stats.values()]) + return {"stats": stats, + "total_max_width": total_max_width, + "total_avg_width": total_avg_width} + + +def build_best_guess_formatters_using_average_widths(objs, fields, field_labels, custom_formatters={}, no_wrap_fields=[]): + column_info = build_column_stats_for_best_guess_formatting(objs, fields, field_labels, custom_formatters) + format_spec = {} + total_avg_width = float(column_info["total_avg_width"]) + if total_avg_width <= 0: + return format_spec + for f in [ff for ff in fields if ff not in no_wrap_fields]: + format_spec[f] = float(column_info["stats"][f].average_width) / total_avg_width + custom_formatter = custom_formatters.get(f, None) + if custom_formatter: + format_spec[f] = {"formatter": custom_formatter, "wrapperFormatter": format_spec[f]} + + # Handle no wrap fields by building formatters that will not wrap + for f in [ff for ff in fields if ff in no_wrap_fields]: + format_spec[f] = {"hard_width": column_info["stats"][f].max_width} + custom_formatter = custom_formatters.get(f, None) + if custom_formatter: + format_spec[f] = {"formatter": custom_formatter, "wrapperFormatter": format_spec[f]} + return format_spec + + +def build_best_guess_formatters_using_max_widths(objs, fields, field_labels, custom_formatters={}, no_wrap_fields=[]): + column_info = build_column_stats_for_best_guess_formatting(objs, fields, field_labels, custom_formatters) + format_spec = {} + for f in [ff for ff in fields if ff not in no_wrap_fields]: + format_spec[f] = float(column_info["stats"][f].max_width) / float(column_info["total_max_width"]) + custom_formatter = custom_formatters.get(f, None) + if 
custom_formatter: + format_spec[f] = {"formatter": custom_formatter, "wrapperFormatter": format_spec[f]} + + # Handle no wrap fields by building formatters that will not wrap + for f in [ff for ff in fields if ff in no_wrap_fields]: + format_spec[f] = {"hard_width": column_info["stats"][f].max_width} + custom_formatter = custom_formatters.get(f, None) + if custom_formatter: + format_spec[f] = {"formatter": custom_formatter, "wrapperFormatter": format_spec[f]} + + return format_spec + + +def needs_wrapping_formatters(formatters, no_wrap=None): + no_wrap = is_nowrap_set(no_wrap) + if no_wrap: + return False + + # handle easy case: + if not formatters: + return True + + # If we have at least one wrapping formatter, + # then we assume we don't need to wrap + for f in formatters.values(): + if WrapperFormatter.is_wrapper_formatter(f): + return False + + # looks like we need wrapping + return True + + +def as_wrapping_formatters(objs, fields, field_labels, formatters, no_wrap=None, no_wrap_fields=[]): + """This function is the entry point for building the "best guess" + word wrapping formatters. A best guess formatter guesses what the best + columns widths should be for the table celldata. It does this by collecting + various stats on the celldata (min, max average width of column celldata) and from + this celldata decides the desired widths and the minimum widths. + + Given a list of formatters and the list of objects (objs), this function + first determines if we need to augment the passed formatters with word wrapping + formatters. If the no_wrap parameter or global no_wrap flag is set, + then we do not build wrapping formatters. If any of the formatters within formatters + is a word wrapping formatter, then it is assumed no more wrapping is required. 
+ + :param objs: + :param fields: + :param field_labels: + :param formatters: + :param no_wrap: + :param no_wrap_fields: + :return: When no wrapping is required, the formatters parameter is returned + -- effectively a NOOP in this case + + When wrapping is required, best-guess word wrapping formatters are returned + with original parameter formatters embedded in the word wrapping formatters + """ + no_wrap = is_nowrap_set(no_wrap) + + if not needs_wrapping_formatters(formatters, no_wrap): + return formatters + + format_spec = build_best_guess_formatters_using_average_widths(objs, fields, field_labels, formatters, no_wrap_fields) + + formatters = build_wrapping_formatters(objs, fields, field_labels, format_spec) + + return formatters + + +def build_wrapping_formatters(objs, fields, field_labels, format_spec, add_blank_line=True, + no_wrap=None, use_max=False): + """ + A convenience function for building all wrapper formatters that will be used to + format a CLI's output when its rendered in a prettyTable object. + + It iterates through the keys of format_spec and calls wrapperFormatterFactory to build + wrapperFormatter objects for each column. 
+ + Its best to show by example parameters: + + field_labels = ['UUID', 'Time Stamp', 'State', 'Event Log ID', 'Reason Text', + 'Entity Instance ID', 'Severity'] + fields = ['uuid', 'timestamp', 'state', 'event_log_id', 'reason_text', + 'entity_instance_id', 'severity'] + format_spec = { + "uuid" : .10, # float = so display as 10% of terminal width + "timestamp" : .08, + "state" : .08, + "event_log_id" : .07, + "reason_text" : .42, + "entity_instance_id" : .13, + "severity" : {"formatter" : captializeFunction, + "wrapperFormatter": .12} + } + + :param objs: the actual celldata that will get word wrapped + :param fields: fields (attributes of the celldata) that will be displayed in the table + :param field_labels: column (field headers) + :param format_spec: dict specify formatter for each column (field) + :param add_blank_line: default True, when tru adds blank line to column if it wraps, aids readability + :param no_wrap: default False, when True turns wrapping off but does not suppress other custom formatters + :param use_max + :return: wrapping formatters as functions + """ + + no_wrap = set_no_wrap(no_wrap) + + if objs is None or len(objs) == 0: + return {} + + biggest_word_pattern = re.compile("[\.:,;\!\?\\ =-\_]") + + def get_biggest_word(s): + return max(biggest_word_pattern.split(s), key=len) + + wrapping_formatters_as_functions = {} + + if len(fields) != len(field_labels): + raise Exception("Error in buildWrappingFormatters: " + "len(fields) = {}, len(field_labels) = {}," + " they must be the same length!".format(len(fields), + len(field_labels))) + field_to_label = {} + + for i in range(0, len(fields)): + field_to_label[fields[i]] = field_labels[i] + + ctx = WrapperContext() + ctx.set_num_columns(len(fields)) + + if not format_spec: + if use_max: + format_spec = build_best_guess_formatters_using_max_widths(objs, fields, field_labels) + else: + format_spec = build_best_guess_formatters_using_average_widths(objs, fields, field_labels) + + for k in 
format_spec.keys(): + if k not in fields: + raise Exception("Error in buildWrappingFormatters: format_spec " + "specifies a field {} that is not specified " + "in fields : {}".format(k, fields)) + + format_spec_for_k = copy.deepcopy(format_spec[k]) + if callable(format_spec_for_k): + format_spec_for_k = {"formatter": format_spec_for_k} + wrapper_formatter = wrapper_formatter_factory(ctx, k, format_spec_for_k) + if wrapper_formatter.min_width <= 0: + # need to specify min-width so that + # column is not unnecessarily squashed + if is_uuid_field(k): # special case + wrapper_formatter.set_min_width(UUID_MIN_LENGTH) + else: + # column width cannot be smaller than the widest word + column_data = [str(wrapper_formatter.get_unwrapped_field_value(data)) for data in objs] + widest_word_in_column = max([get_biggest_word(d) + " " + for d in column_data + [field_to_label[k]]], key=len) + wrapper_formatter.set_min_width(len(widest_word_in_column)) + wrapper_formatter.header_width = get_width(field_to_label[k]) + + wrapper_formatter.add_blank_line = add_blank_line + wrapper_formatter.no_wrap = no_wrap + wrapping_formatters_as_functions[k] = wrapper_formatter.as_function() + ctx.add_column_formatter(k, wrapper_formatter) + + return wrapping_formatters_as_functions + + +def set_no_wrap_on_formatters(no_wrap, formatters): + """ + Purpose of this function is to temporarily force + the no_wrap setting for the formatters parameter. 
+ returns orig_no_wrap_settings defined for each formatter + Use unset_no_wrap_on_formatters(orig_no_wrap_settings) to undo what + this function does + """ + # handle easy case: + if not formatters: + return {} + + formatter_no_wrap_settings = {} + + global_orig_no_wrap = is_nowrap_set() + set_no_wrap(no_wrap) + + for k, f in formatters.iteritems(): + if WrapperFormatter.is_wrapper_formatter(f): + formatter_no_wrap_settings[k] = (f.wrapper_formatter.no_wrap, f.wrapper_formatter) + f.wrapper_formatter.no_wrap = no_wrap + + return {"global_orig_no_wrap": global_orig_no_wrap, + "formatter_no_wrap_settings": formatter_no_wrap_settings} + + +def unset_no_wrap_on_formatters(orig_no_wrap_settings): + """ + It only makes sense to call this function with the return value + from the last call to set_no_wrap_on_formatters(no_wrap, formatters). + It effectively undoes what set_no_wrap_on_formatters() does + """ + if not orig_no_wrap_settings: + return {} + + global_orig_no_wrap = orig_no_wrap_settings["global_orig_no_wrap"] + formatter_no_wrap_settings = orig_no_wrap_settings["formatter_no_wrap_settings"] + + formatters = {} + + for k, v in formatter_no_wrap_settings.iteritems(): + formatters[k] = v[1] + formatters[k].no_wrap = v[0] + + set_no_wrap(global_orig_no_wrap) + + return formatters + + +def _simpleTestHarness(no_wrap): + + import utils + + def testFormatter(event): + return "*{}".format(event["state"]) + + def buildFormatter(field, width): + def f(dict): + if field == 'number': + return dict[field] + return "{}".format(dict[field]).replace("_", " ") + return {"formatter": f, "wrapperFormatter": width} + + set_no_wrap(no_wrap) + + field_labels = ['Time Stamp', 'State', 'Event Log ID', 'Reason Text', + 'Entity Instance ID', 'Severity', 'Number'] + fields = ['timestamp', 'state', 'event_log_id', 'reason_text', + 'entity_instance_id', 'severity', 'number'] + + formatterSpecX = {"timestamp": 10, + "state": 8, + "event_log_id": 70, + "reason_text": 30, + 
"entity_instance_id": 30, + "severity": 12, + "number": 4} + + formatterSpec = {} + for f in fields: + formatterSpec[f] = buildFormatter(f, formatterSpecX[f]) + + logs = [] + for i in range(0, 30): + log = {} + for f in fields: + if f == 'number': + log[f] = i + else: + log[f] = "{}{}".format(f, i) + logs.append(utils.objectify(log)) + + formatterSpec = formatterSpecX + + formatters = build_wrapping_formatters(logs, fields, field_labels, formatterSpec) + + utils.print_list(logs, fields, field_labels, formatters=formatters, sortby=6, + reversesort=True, no_wrap_fields=['entity_instance_id']) + + print("nowrap = {}".format(is_nowrap_set())) + + +if __name__ == "__main__": + _simpleTestHarness(True) + _simpleTestHarness(False) diff --git a/python-fmclient/fmclient/fmclient/exc.py b/python-fmclient/fmclient/fmclient/exc.py new file mode 100644 index 00000000..829fa571 --- /dev/null +++ b/python-fmclient/fmclient/fmclient/exc.py @@ -0,0 +1,89 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + + +class BaseException(Exception): + """An error occurred.""" + def __init__(self, message=None): + self.message = message + + def __str__(self): + return str(self.message) or self.__class__.__doc__ + + +class AuthSystem(BaseException): + """Could not obtain token and endpoint using provided credentials.""" + pass + + +class CommandError(BaseException): + """Invalid usage of CLI.""" + + +class InvalidEndpoint(BaseException): + """The provided endpoint is invalid.""" + + +class CommunicationError(BaseException): + """Unable to communicate with server.""" + + +class EndpointException(BaseException): + pass + + +class HTTPException(Exception): + """Base exception for all HTTP-derived exceptions.""" + code = 'N/A' + + def __init__(self, details=None): + self.details = details + + def __str__(self): + return str(self.details) or "%s (HTTP %s)" % (self.__class__.__name__, + self.code) + + +class HTTPMultipleChoices(HTTPException): + code = 300 + + def __str__(self): + self.details = "Requested version of FM API is not available." + return "%s (HTTP %s) %s" % (self.__class__.__name__, self.code, + self.details) + + +class Unauthorized(HTTPException): + code = 401 + + +class HTTPUnauthorized(Unauthorized): + pass + + +class NotFound(HTTPException): + """DEPRECATED.""" + code = 404 + + +class HTTPNotFound(NotFound): + pass + + +class HTTPMethodNotAllowed(HTTPException): + code = 405 + + +class HTTPInternalServerError(HTTPException): + code = 500 + + +class HTTPNotImplemented(HTTPException): + code = 501 + + +class HTTPBadGateway(HTTPException): + code = 502 diff --git a/python-fmclient/fmclient/fmclient/shell.py b/python-fmclient/fmclient/fmclient/shell.py new file mode 100644 index 00000000..c9d3be8f --- /dev/null +++ b/python-fmclient/fmclient/fmclient/shell.py @@ -0,0 +1,326 @@ +# +# Copyright (c) 2018 Wind River Systems, Inc. 
#
# SPDX-License-Identifier: Apache-2.0
#

"""
Command-line interface for Fault Management
"""

import argparse
import httplib2
import logging
import sys
from oslo_utils import importutils

import fmclient
from fmclient.common import utils
from fmclient import exc
from fmclient import client


class FmShell(object):
    """Top-level command shell for the ``fm`` CLI.

    Parses global options, builds version-specific subcommands, and
    dispatches to the selected subcommand callback.
    """

    def get_base_parser(self):
        """Build the parser for the global (pre-subcommand) options."""
        parser = argparse.ArgumentParser(
            prog='fm',
            description=__doc__.strip(),
            epilog='See "fm help COMMAND" '
                   'for help on a specific command.',
            add_help=False,
            formatter_class=HelpFormatter,
        )

        # Global arguments.  Underscore spellings (e.g. --os_username) are
        # hidden aliases kept for backward compatibility; argparse keeps the
        # first-registered default for a shared dest.
        parser.add_argument('-h', '--help',
                            action='store_true',
                            help=argparse.SUPPRESS,
                            )

        parser.add_argument('--version',
                            action='version',
                            version=fmclient.__version__)

        parser.add_argument('--debug',
                            default=bool(utils.env('FMCLIENT_DEBUG')),
                            action='store_true',
                            help='Defaults to env[FMCLIENT_DEBUG]')

        parser.add_argument('-v', '--verbose',
                            default=False, action="store_true",
                            help="Print more verbose output")

        parser.add_argument('--timeout',
                            default=600,
                            help='Number of seconds to wait for a response')

        parser.add_argument('--os-username',
                            default=utils.env('OS_USERNAME'),
                            help='Defaults to env[OS_USERNAME]')

        parser.add_argument('--os_username',
                            help=argparse.SUPPRESS)

        parser.add_argument('--os-password',
                            default=utils.env('OS_PASSWORD'),
                            help='Defaults to env[OS_PASSWORD]')

        parser.add_argument('--os_password',
                            help=argparse.SUPPRESS)

        parser.add_argument('--os-tenant-id',
                            default=utils.env('OS_TENANT_ID'),
                            help='Defaults to env[OS_TENANT_ID]')

        parser.add_argument('--os_tenant_id',
                            help=argparse.SUPPRESS)

        parser.add_argument('--os-tenant-name',
                            default=utils.env('OS_TENANT_NAME'),
                            help='Defaults to env[OS_TENANT_NAME]')

        parser.add_argument('--os_tenant_name',
                            help=argparse.SUPPRESS)

        parser.add_argument('--os-auth-url',
                            default=utils.env('OS_AUTH_URL'),
                            help='Defaults to env[OS_AUTH_URL]')

        parser.add_argument('--os_auth_url',
                            help=argparse.SUPPRESS)

        parser.add_argument('--os-region-name',
                            default=utils.env('OS_REGION_NAME'),
                            help='Defaults to env[OS_REGION_NAME]')

        parser.add_argument('--os_region_name',
                            help=argparse.SUPPRESS)

        parser.add_argument('--os-auth-token',
                            default=utils.env('OS_AUTH_TOKEN'),
                            help='Defaults to env[OS_AUTH_TOKEN]')

        parser.add_argument('--os_auth_token',
                            help=argparse.SUPPRESS)

        parser.add_argument('--fm-url',
                            default=utils.env('FM_URL'),
                            help='Defaults to env[FM_URL]')

        parser.add_argument('--fm_url',
                            help=argparse.SUPPRESS)

        parser.add_argument('--fm-api-version',
                            default=utils.env('FM_API_VERSION', default='1'),
                            help='Defaults to env[FM_API_VERSION] '
                                 'or 1')

        parser.add_argument('--fm_api_version',
                            help=argparse.SUPPRESS)

        parser.add_argument('--os-service-type',
                            default=utils.env('OS_SERVICE_TYPE',
                                              default=client.SERVICE_TYPE),
                            help='Defaults to env[OS_SERVICE_TYPE]')

        parser.add_argument('--os_service_type',
                            help=argparse.SUPPRESS)

        parser.add_argument('--os-endpoint-type',
                            default=utils.env('OS_ENDPOINT_TYPE'),
                            help='Defaults to env[OS_ENDPOINT_TYPE]')

        parser.add_argument('--os_endpoint_type',
                            help=argparse.SUPPRESS)

        parser.add_argument('--os-user-domain-id',
                            default=utils.env('OS_USER_DOMAIN_ID'),
                            help='Defaults to env[OS_USER_DOMAIN_ID].')

        parser.add_argument('--os-user-domain-name',
                            default=utils.env('OS_USER_DOMAIN_NAME'),
                            help='Defaults to env[OS_USER_DOMAIN_NAME].')

        parser.add_argument('--os-project-id',
                            default=utils.env('OS_PROJECT_ID'),
                            help='Another way to specify tenant ID. '
                                 'This option is mutually exclusive with '
                                 ' --os-tenant-id. '
                                 'Defaults to env[OS_PROJECT_ID].')

        parser.add_argument('--os-project-name',
                            default=utils.env('OS_PROJECT_NAME'),
                            help='Another way to specify tenant name. '
                                 'This option is mutually exclusive with '
                                 ' --os-tenant-name. '
                                 'Defaults to env[OS_PROJECT_NAME].')

        parser.add_argument('--os-project-domain-id',
                            default=utils.env('OS_PROJECT_DOMAIN_ID'),
                            help='Defaults to env[OS_PROJECT_DOMAIN_ID].')

        parser.add_argument('--os-project-domain-name',
                            default=utils.env('OS_PROJECT_DOMAIN_NAME'),
                            help='Defaults to env[OS_PROJECT_DOMAIN_NAME].')

        return parser

    def get_subcommand_parser(self, version):
        """Return the base parser extended with version-specific commands."""
        parser = self.get_base_parser()

        self.subcommands = {}
        # NOTE(review): metavar text looked stripped in the original
        # ('' after HTML-like mangling); '<subcommand>' restored — confirm.
        subparsers = parser.add_subparsers(metavar='<subcommand>')
        submodule = importutils.import_versioned_module('fmclient',
                                                        version, 'shell')
        submodule.enhance_parser(parser, subparsers, self.subcommands)
        utils.define_commands_from_module(subparsers, self, self.subcommands)
        self._add_bash_completion_subparser(subparsers)
        return parser

    def _add_bash_completion_subparser(self, subparsers):
        """Register the hidden 'bash_completion' helper subcommand."""
        subparser = subparsers.add_parser(
            'bash_completion',
            add_help=False,
            formatter_class=HelpFormatter
        )
        self.subcommands['bash_completion'] = subparser
        subparser.set_defaults(func=self.do_bash_completion)

    def _setup_debugging(self, debug):
        """Configure logging verbosity; enable HTTP tracing when debugging."""
        if debug:
            logging.basicConfig(
                format="%(levelname)s (%(module)s:%(lineno)d) %(message)s",
                level=logging.DEBUG)

            httplib2.debuglevel = 1
        else:
            logging.basicConfig(format="%(levelname)s %(message)s",
                                level=logging.CRITICAL)

    def main(self, argv):
        """Entry point: parse argv, validate credentials, run subcommand."""
        # Parse args once to find version
        parser = self.get_base_parser()
        (options, args) = parser.parse_known_args(argv)
        self._setup_debugging(options.debug)

        # build available subcommands based on version
        api_version = options.fm_api_version
        subcommand_parser = self.get_subcommand_parser(api_version)
        self.parser = subcommand_parser

        # Handle top-level --help/-h before attempting to parse
        # a command off the command line
        if options.help or not argv:
            self.do_help(options)
            return 0

        # Parse args again and call whatever callback was selected
        args = subcommand_parser.parse_args(argv)

        # Short-circuit and deal with help command right away.
        if args.func == self.do_help:
            self.do_help(args)
            return 0
        elif args.func == self.do_bash_completion:
            self.do_bash_completion(args)
            return 0

        # Without a pre-made token + endpoint, full Keystone credentials
        # are required.
        if not (args.os_auth_token and args.fm_url):
            if not args.os_username:
                raise exc.CommandError("You must provide a username via "
                                       "either --os-username or via "
                                       "env[OS_USERNAME]")

            if not args.os_password:
                raise exc.CommandError("You must provide a password via "
                                       "either --os-password or via "
                                       "env[OS_PASSWORD]")

            if not (args.os_project_id or args.os_project_name):
                raise exc.CommandError("You must provide a project name via "
                                       "either --os-project-name or via "
                                       "env[OS_PROJECT_NAME]")

            if not args.os_auth_url:
                raise exc.CommandError("You must provide an auth url via "
                                       "either --os-auth-url or via "
                                       "env[OS_AUTH_URL]")

            if not args.os_region_name:
                raise exc.CommandError("You must provide a region name via "
                                       "either --os-region-name or via "
                                       "env[OS_REGION_NAME]")

        client_args = (
            'os_auth_token', 'fm_url', 'os_username', 'os_password',
            'os_auth_url', 'os_project_id', 'os_project_name', 'os_tenant_id',
            'os_tenant_name', 'os_region_name', 'os_user_domain_id',
            'os_user_domain_name', 'os_project_domain_id',
            'os_project_domain_name', 'os_service_type', 'os_endpoint_type',
            'timeout'
        )
        kwargs = {}
        for key in client_args:
            # get_client() expects keys without the 'os_' prefix.
            client_key = key.replace("os_", "", 1)
            kwargs[client_key] = getattr(args, key)

        # Renamed from 'client' to avoid shadowing the imported
        # fmclient.client module.
        fm_client = fmclient.client.get_client(api_version, **kwargs)

        try:
            args.func(fm_client, args)
        except exc.Unauthorized:
            raise exc.CommandError("Invalid Identity credentials.")

    def do_bash_completion(self, args):
        """Prints all of the commands and options to stdout.
        """
        commands = set()
        options = set()
        for sc_str, sc in self.subcommands.items():
            commands.add(sc_str)
            for option in list(sc._optionals._option_string_actions):
                options.add(option)

        commands.remove('bash_completion')
        print(' '.join(commands | options))

    @utils.arg('command', metavar='<subcommand>', nargs='?',
               help='Display help for <subcommand>')
    def do_help(self, args):
        """Display help about this program or one of its subcommands."""
        if getattr(args, 'command', None):
            if args.command in self.subcommands:
                self.subcommands[args.command].print_help()
            else:
                raise exc.CommandError("'%s' is not a valid subcommand" %
                                       args.command)
        else:
            self.parser.print_help()


class HelpFormatter(argparse.HelpFormatter):
    def start_section(self, heading):
        # Title-case the headings
        heading = '%s%s' % (heading[0].upper(), heading[1:])
        super(HelpFormatter, self).start_section(heading)


def main():
    """Console-script entry point for the ``fm`` command."""
    try:
        FmShell().main(sys.argv[1:])

    except KeyboardInterrupt as e:
        # BUG FIX: the original used Python-2-only print-chevron syntax
        # ('print >> sys.stderr, ...'), which is a SyntaxError on the
        # Python 3 interpreters this package advertises support for.
        print('caught: %r, aborting' % (e,), file=sys.stderr)
        sys.exit(0)

    except IOError:
        sys.exit(0)

    except Exception as e:
        print(e, file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
#
# SPDX-License-Identifier: Apache-2.0
#

from fmclient.common import options
from fmclient.common import base


class Alarm(base.Resource):
    """An active alarm returned by the FM REST API."""

    def __repr__(self):
        # BUG FIX: the repr format string had been stripped to "" (the
        # angle-bracketed text was lost in transit); conventional
        # "<Alarm %s>" restored.
        return "<Alarm %s>" % self._info


class AlarmManager(base.Manager):
    """Manager for the /v1/alarms REST resource."""

    resource_class = Alarm

    @staticmethod
    def _path(id=None):
        # Collection path when no id given, member path otherwise.
        return '/v1/alarms/%s' % id if id else '/v1/alarms'

    def list(self, q=None, limit=None, marker=None, sort_key=None,
             sort_dir=None, include_suppress=False):
        """List active alarms.

        :param q: query filter list (as built by options.cli_to_array)
        :param limit: maximum number of alarms to return
        :param marker: pagination marker (last item of previous page)
        :param sort_key: field to sort on
        :param sort_dir: 'asc' or 'desc'
        :param include_suppress: include suppressed alarms when True
        """
        params = []

        if include_suppress:
            params.append('include_suppress=True')
        if limit:
            params.append('limit=%s' % str(limit))
        if marker:
            params.append('marker=%s' % str(marker))
        if sort_key:
            params.append('sort_key=%s' % str(sort_key))
        if sort_dir:
            params.append('sort_dir=%s' % str(sort_dir))

        return self._list(options.build_url(self._path(), q, params), 'alarms')

    def get(self, iid):
        """Return a single alarm by id, or None when it does not exist."""
        try:
            return self._list(self._path(iid))[0]
        except IndexError:
            return None

    def delete(self, uuid):
        """Delete (clear) the alarm identified by *uuid*."""
        return self._delete(self._path(uuid))

    def summary(self, include_suppress=False):
        """Return alarm counts grouped by severity."""
        params = []
        if include_suppress:
            params.append('include_suppress=True')
        return self._list(options.build_url(self._path('summary'), None,
                                            params))
#
# SPDX-License-Identifier: Apache-2.0
#


from fmclient import exc
from fmclient.common import utils
from fmclient.common import wrapping_formatters
from fmclient.common import options


def _display_fault(fault):
    """Print one alarm as a two-column key/value table."""
    fields = ['uuid', 'alarm_id', 'alarm_state', 'entity_type_id',
              'entity_instance_id',
              'timestamp', 'severity', 'reason_text', 'alarm_type',
              'probable_cause', 'proposed_repair_action', 'service_affecting',
              'suppression', 'suppression_status', 'mgmt_affecting',
              'degrade_affecting']
    data = dict([(f, getattr(fault, f, '')) for f in fields])
    utils.print_dict(data, wrap=72)


# NOTE(review): metavar text appeared stripped in the original source
# (metavar=''); '<uuid>' restored to match the help text — confirm.
@utils.arg('alarm', metavar='<uuid>', help="ID of the alarm to show")
def do_alarm_show(cc, args=None):
    """Show an active alarm."""
    try:
        fault = cc.alarm.get(args.alarm)
    except exc.HTTPNotFound:
        raise exc.CommandError('Alarm not found: %s' % args.alarm)
    else:
        _display_fault(fault)


@utils.arg('alarm', metavar='<uuid>', help="UUID of the alarm to delete")
def do_alarm_delete(cc, args=None):
    """Delete an active alarm."""
    try:
        cc.alarm.delete(args.alarm)
    except exc.HTTPNotFound:
        raise exc.CommandError('Alarm not found: %s' % args.alarm)


@utils.arg('-q', '--query', metavar='<QUERY>',
           help='key[op]data_type::value; list. data_type is optional, '
                'but if supplied must be string, integer, float, or boolean.')
@utils.arg('--uuid', action='store_true',
           help='Include UUID in output')
@utils.arg('--include_suppress',
           action='store_true',
           help='Include suppressed alarms in output')
@utils.arg('--mgmt_affecting',
           action='store_true',
           help='Include management affecting status in output')
@utils.arg('--degrade_affecting',
           action='store_true',
           help='Include degrade affecting status in output')
def do_alarm_list(cc, args=None):
    """List all active alarms.

    Note: the mutable default argument (args={}) of the original was
    replaced with None; the callback is always invoked with a parsed
    argparse namespace by the shell dispatcher.
    """
    include_uuid = args.uuid
    include_suppress = bool(args.include_suppress)
    include_mgmt_affecting = bool(args.mgmt_affecting)
    include_degrade_affecting = bool(args.degrade_affecting)

    faults = cc.alarm.list(q=options.cli_to_array(args.query),
                           include_suppress=include_suppress)
    for fault in faults:
        utils.normalize_field_data(fault, ['entity_type_id',
                                           'entity_instance_id',
                                           'reason_text',
                                           'proposed_repair_action'])

    # omit action initially to keep output width sane
    # (can switch over to vertical formatting when available from CLIFF)

    def highlight_alarm_id(alarm):
        # Prefix suppressed alarms with "S(...)" so they stand out.
        suppressed = (hasattr(alarm, "suppression_status") and
                      alarm.suppression_status == "suppressed")
        if suppressed:
            return "S({})".format(alarm.alarm_id)
        return alarm.alarm_id

    field_labels = ['Alarm ID', 'Reason Text', 'Entity ID', 'Severity',
                    'Time Stamp']
    fields = ['alarm_id', 'reason_text', 'entity_instance_id', 'severity',
              'timestamp']
    # for best results, ensure width ratios add up to 1 (=100%)
    formatterSpec = {"alarm_id": {"formatter": highlight_alarm_id,
                                  "wrapperFormatter": .08},
                     "reason_text": .54,
                     "entity_instance_id": .15,
                     "severity": .10,
                     "timestamp": .10,
                     }

    if include_uuid:
        field_labels.insert(0, 'UUID')
        fields.insert(0, 'uuid')
        # for best results, ensure width ratios add up to 1 (=100%)
        formatterSpec['uuid'] = wrapping_formatters.UUID_MIN_LENGTH
        formatterSpec['reason_text'] -= .05
        formatterSpec['entity_instance_id'] -= .02

    if include_mgmt_affecting:
        field_labels.insert(4, 'Management Affecting')
        fields.insert(4, 'mgmt_affecting')
        # for best results, ensure width ratios add up to 1 (=100%)
        formatterSpec['mgmt_affecting'] = .08
        formatterSpec['reason_text'] -= .05
        formatterSpec['severity'] -= .03

    if include_degrade_affecting:
        field_labels.insert(5, 'Degrade Affecting')
        fields.insert(5, 'degrade_affecting')
        # for best results, ensure width ratios add up to 1 (=100%)
        formatterSpec['degrade_affecting'] = .08
        formatterSpec['reason_text'] -= .05
        formatterSpec['severity'] -= .03

    formatters = wrapping_formatters.build_wrapping_formatters(
        faults, fields, field_labels, formatterSpec)

    utils.print_list(faults, fields, field_labels, formatters=formatters,
                     sortby=fields.index('timestamp'), reversesort=True)


@utils.arg('--include_suppress',
           action='store_true',
           help='Include suppressed alarms in output')
def do_alarm_summary(cc, args=None):
    """Show a summary of active alarms."""
    include_suppress = bool(args.include_suppress)
    faults = cc.alarm.summary(include_suppress)
    field_labels = ['Critical Alarms', 'Major Alarms', 'Minor Alarms',
                    'Warnings']
    fields = ['critical', 'major', 'minor', 'warnings']
    utils.print_list(faults, fields, field_labels)
#
# SPDX-License-Identifier: Apache-2.0
#


from fmclient.common import http
from fmclient.common.http import DEFAULT_VERSION
from fmclient.common.i18n import _
from fmclient.common import exceptions as exc
from fmclient.v1 import alarm
from fmclient.v1 import event_log
from fmclient.v1 import event_suppression


class Client(object):
    """Client for the FM v1 API.

    :param string endpoint: A user-supplied endpoint URL for the FM
                            service.
    :param function token: Provides token for authentication.
    :param integer timeout: Allows customization of the timeout for client
                            http requests. (optional)
    """
    # Docstring fix: the endpoint description previously referred to the
    # "ironic service", a copy/paste leftover from another client.

    def __init__(self, endpoint=None, session=None, **kwargs):
        """Initialize a new client for the FM v1 API."""
        if not session:
            if kwargs.get('os_fm_api_version'):
                # Caller explicitly chose an API version.
                kwargs['api_version_select_state'] = "user"
            else:
                if not endpoint:
                    raise exc.EndpointException(
                        _("Must provide 'endpoint' if os_fm_api_version "
                          "isn't specified"))

                # If the user didn't specify a version, use a default version
                kwargs['api_version_select_state'] = "default"
                kwargs['os_fm_api_version'] = DEFAULT_VERSION

        self.http_client = http.get_http_client(endpoint, session, **kwargs)
        self.alarm = alarm.AlarmManager(self.http_client)
        self.event_log = event_log.EventLogManager(self.http_client)
        self.event_suppression = event_suppression.EventSuppressionManager(
            self.http_client)
#
# SPDX-License-Identifier: Apache-2.0
#


from fmclient.common import options
from fmclient.common import base


class EventLog(base.Resource):
    """A historical event-log entry returned by the FM REST API."""

    def __repr__(self):
        # BUG FIX: the repr format string had been stripped to "";
        # conventional "<EventLog %s>" restored.
        return "<EventLog %s>" % self._info


class EventLogManager(base.Manager):
    """Manager for the /v1/event_log REST resource."""

    resource_class = EventLog

    @staticmethod
    def _path(id=None):
        # Collection path when no id given, member path otherwise.
        return '/v1/event_log/%s' % id if id else '/v1/event_log'

    def list(self, q=None, limit=None, marker=None, alarms=False, logs=False,
             include_suppress=False):
        """List event-log entries.

        :param q: query filter list (as built by options.cli_to_array)
        :param limit: maximum number of entries to return
        :param marker: pagination marker
        :param alarms: restrict to alarm events when True (and logs False)
        :param logs: restrict to log events when True (and alarms False)
        :param include_suppress: include suppressed events when True
        """
        params = []
        if limit:
            params.append('limit=%s' % str(limit))
        if marker:
            params.append('marker=%s' % str(marker))
        if include_suppress:
            params.append('include_suppress=True')
        # Only filter when exactly one of alarms/logs was requested;
        # both or neither means "return everything".
        if alarms is True and logs is False:
            params.append('alarms=True')
        elif alarms is False and logs is True:
            params.append('logs=True')

        rest_api_url = options.build_url(self._path(), q, params)

        return self._list(rest_api_url, 'event_log')

    def get(self, iid):
        """Return a single event-log entry by id, or None if not found."""
        try:
            return self._list(self._path(iid))[0]
        except IndexError:
            return None
#
# SPDX-License-Identifier: Apache-2.0
#


from fmclient import exc
from fmclient.common import utils
from fmclient.common import wrapping_formatters
from fmclient.common import options


def _display_event(log):
    """Print one event-log entry as a two-column key/value table."""
    fields = ['uuid', 'event_log_id', 'state', 'entity_type_id',
              'entity_instance_id',
              'timestamp', 'severity', 'reason_text', 'event_log_type',
              'probable_cause', 'proposed_repair_action',
              'service_affecting', 'suppression', 'suppression_status']
    data = dict([(f, getattr(log, f, '')) for f in fields])
    utils.print_dict(data, wrap=72)


# NOTE(review): metavar text appeared stripped in the original source
# (metavar=''); '<uuid>' restored to match the help text — confirm.
@utils.arg('event_log', metavar='<uuid>',
           help="ID of the event log to show")
def do_event_show(cc, args=None):
    """Show a event log."""
    try:
        log = cc.event_log.get(args.event_log)
    except exc.HTTPNotFound:
        raise exc.CommandError('Event log not found: %s' % args.event_log)
    else:
        _display_event(log)


@utils.arg('-q', '--query', metavar='<QUERY>',
           help='key[op]data_type::value; list. data_type is optional, '
                'but if supplied must be string, integer, float, or boolean. '
                'Valid query fields (event_log_id, entity_type_id, '
                'entity_instance_id, severity, start, end)'
                ' Example: fm event-list -q \'start=20160131 10:23:45;end=20171225\'')
@utils.arg('-l', '--limit', metavar='<NUMBER>',
           help='Maximum number of event logs to return.')
@utils.arg('--alarms',
           action='store_true',
           help='Show alarms only')
@utils.arg('--logs', action='store_true',
           help='Show logs only')
@utils.arg('--uuid', action='store_true',
           help='Include UUID in output')
@utils.arg('--include_suppress',
           action='store_true',
           help='Include suppressed alarms in output')
@utils.arg('--nopaging', action='store_true',
           help='Output is not paged')
def do_event_list(cc, args=None):
    """List event logs."""
    query_as_array = options.cli_to_array(args.query)

    no_paging = args.nopaging
    include_uuid = args.uuid
    include_suppress = bool(args.include_suppress)

    # Exactly one of --alarms/--logs selects a subset; both or neither
    # returns everything.
    only_alarms = bool(args.alarms and not args.logs)
    only_logs = bool(args.logs and not args.alarms)

    # BUG FIX: the original rebound the boolean flag variable 'logs' to the
    # result list of cc.event_log.list(); the result now has its own name.
    events = cc.event_log.list(q=query_as_array, limit=args.limit,
                               alarms=only_alarms, logs=only_logs,
                               include_suppress=include_suppress)
    for event in events:
        utils.normalize_field_data(event, ['entity_instance_id',
                                           'reason_text'])

    # omit action initially to keep output width sane
    # (can switch over to vertical formatting when available from CLIFF)

    def highlight_event_id(event):
        # Prefix suppressed events with "S(...)" so they stand out.
        suppressed = (hasattr(event, "suppression_status") and
                      event.suppression_status == "suppressed")
        if suppressed:
            return "S({})".format(event.event_log_id)
        return event.event_log_id

    if include_uuid:
        field_labels = ['UUID', 'Time Stamp', 'State', 'Event Log ID',
                        'Reason Text', 'Entity Instance ID', 'Severity']
        fields = ['uuid', 'timestamp', 'state', 'event_log_id', 'reason_text',
                  'entity_instance_id', 'severity']
        formatterSpec = {"uuid": wrapping_formatters.UUID_MIN_LENGTH,
                         "timestamp": .08,
                         "state": .08,
                         "event_log_id": {"formatter": highlight_event_id,
                                          "wrapperFormatter": .07},
                         "reason_text": .42,
                         "entity_instance_id": .13,
                         "severity": .12}
    else:
        field_labels = ['Time Stamp', 'State', 'Event Log ID', 'Reason Text',
                        'Entity Instance ID', 'Severity']
        fields = ['timestamp', 'state', 'event_log_id', 'reason_text',
                  'entity_instance_id', 'severity']
        # for best results, ensure width ratios add up to 1 (=100%)
        formatterSpec = {"timestamp": .08,
                         "state": .08,
                         "event_log_id": {"formatter": highlight_event_id,
                                          "wrapperFormatter": .07},
                         "reason_text": .52,
                         "entity_instance_id": .13,
                         "severity": .12}
    formatters = wrapping_formatters.build_wrapping_formatters(
        events, fields, field_labels, formatterSpec)

    utils.print_long_list(events, fields, field_labels,
                          formatters=formatters,
                          sortby=fields.index('timestamp'),
                          reversesort=True, no_paging=no_paging)
#
# SPDX-License-Identifier: Apache-2.0
#


import json
from fmclient.common import options
from fmclient.common import base


class EventSuppression(base.Resource):
    """A per-alarm-type suppression setting from the FM REST API."""

    def __repr__(self):
        # BUG FIX: the repr format string had been stripped to "";
        # conventional "<EventSuppression %s>" restored.
        return "<EventSuppression %s>" % self._info


class EventSuppressionManager(base.Manager):
    """Manager for the /v1/event_suppression REST resource."""

    resource_class = EventSuppression

    @staticmethod
    def _path(iid=None):
        # Collection path when no id given, member path otherwise.
        return ('/v1/event_suppression/%s' % iid if iid
                else '/v1/event_suppression')

    def list(self, q=None):
        """List event-suppression entries matching the optional query."""
        params = []

        rest_api_url = options.build_url(self._path(), q, params)

        return self._list(rest_api_url, 'event_suppression')

    def get(self, iid):
        """Return one suppression entry by id, or None if not found."""
        try:
            return self._list(self._path(iid))[0]
        except IndexError:
            return None

    def update(self, event_suppression_uuid, patch):
        """Apply a JSON patch (list of ops) to a suppression entry."""
        return self._update(self._path(event_suppression_uuid),
                            data=json.dumps(patch))
#
# SPDX-License-Identifier: Apache-2.0
#


from fmclient.common import utils
from fmclient.common import wrapping_formatters
from fmclient.common import options


def _get_display_config(include_uuid):
    """Return field names, labels and column widths for suppression lists."""
    if include_uuid:
        field_labels = ['UUID', 'Event ID', 'Status']
        fields = ['uuid', 'alarm_id', 'suppression_status']

        formatter_spec = {"uuid": 40,
                          "alarm_id": 25,
                          "suppression_status": 15}
    else:
        field_labels = ['Event ID', 'Status']
        fields = ['alarm_id', 'suppression_status']

        formatter_spec = {"alarm_id": 25,
                          "suppression_status": 15}

    return {
        'field_labels': field_labels,
        'fields': fields,
        'formatterSpec': formatter_spec
    }


def _display_event_suppression(log):
    """Print one suppression entry as a two-column key/value table."""
    fields = ['uuid', 'alarm_id', 'description', 'suppression_status']
    data = dict([(f, getattr(log, f, '')) for f in fields])
    utils.print_dict(data, wrap=72)


def _get_suppressed_alarms_tuples(data):
    """Split data['suppressed_alarms'] (comma-separated IDs) into a list.

    NOTE(review): despite the name, this has never produced (start, end)
    tuples — '(a)' is just 'a' — so it returns the plain list of alarm-id
    strings.  The original docstring was garbled; no caller in this module
    uses the function.
    """
    return data['suppressed_alarms'].split(',')


def _event_suppression_list(cc, include_unsuppressed=False):
    """Fetch suppression entries; only suppressed ones unless asked."""
    if include_unsuppressed:
        query_as_array = []
    else:
        query_as_array = options.cli_to_array(
            'suppression_status=string::suppressed')

    return cc.event_suppression.list(q=query_as_array)


def print_event_suppression_list(cc, no_paging, include_uuid):
    """Print the currently-suppressed Event IDs as a table."""
    suppression_list = _event_suppression_list(cc,
                                               include_unsuppressed=False)

    display_cfg = _get_display_config(include_uuid)

    field_labels = display_cfg['field_labels']
    fields = display_cfg['fields']
    formatter_spec = display_cfg['formatterSpec']

    formatters = wrapping_formatters.build_wrapping_formatters(
        suppression_list, fields, field_labels, formatter_spec)

    utils.print_long_list(suppression_list, fields, field_labels,
                          formatters=formatters, sortby=1,
                          reversesort=False, no_paging=no_paging)


def event_suppression_update(cc, data, suppress=False):
    """Suppress or unsuppress every alarm ID named in data['alarm_id']."""
    suppression_list = _event_suppression_list(cc, include_unsuppressed=True)

    alarm_id_list = data['alarm_id'].split(',')

    patch_value = 'suppressed' if suppress else 'unsuppressed'

    for entry in suppression_list:
        if entry.alarm_id in alarm_id_list:
            print("Alarm ID: {} {}.".format(entry.alarm_id, patch_value))
            # BUG FIX: build a fresh single-op patch per entry.  The
            # original accumulated ops in one list across iterations, so
            # every update after the first replayed all earlier patch
            # operations as well.
            patch = [dict(path='/suppression_status', value=patch_value,
                          op='replace')]
            cc.event_suppression.update(entry.uuid, patch)


@utils.arg('--include-unsuppressed', action='store_true',
           help='Include unsuppressed Event ID\'s')
@utils.arg('--uuid', action='store_true',
           help='Include UUID in output')
@utils.arg('--nopaging', action='store_true',
           help='Output is not paged')
def do_event_suppress_list(cc, args=None):
    """List Suppressed Event ID's """
    suppression_list = _event_suppression_list(
        cc, include_unsuppressed=args.include_unsuppressed)

    display_cfg = _get_display_config(args.uuid)

    field_labels = display_cfg['field_labels']
    fields = display_cfg['fields']
    formatter_spec = display_cfg['formatterSpec']

    formatters = wrapping_formatters.build_wrapping_formatters(
        suppression_list, fields, field_labels, formatter_spec)

    utils.print_long_list(suppression_list, fields, field_labels,
                          formatters=formatters, sortby=1,
                          reversesort=False, no_paging=args.nopaging)


@utils.arg('--alarm_id',
           metavar='<alarm_id>,...',
           help="The alarm_id list (comma separated) of alarm ID's to "
                "suppress.")
@utils.arg('--nopaging', action='store_true',
           help='Output is not paged')
@utils.arg('--uuid', action='store_true',
           help='Include UUID in output')
def do_event_suppress(cc, args=None):
    """Suppress specified Event ID's."""
    field_list = ['alarm_id']

    # Prune input fields down to required/expected values
    data = dict((k, v) for (k, v) in vars(args).items()
                if k in field_list and not (v is None))

    if 'alarm_id' in data:
        event_suppression_update(cc, data, suppress=True)

    print_event_suppression_list(cc, args.nopaging, args.uuid)


@utils.arg('--alarm_id',
           metavar='<alarm_id>,...',
           help="The alarm_id list (comma separated) of alarm ID's to "
                "unsuppress.")
@utils.arg('--nopaging', action='store_true',
           help='Output is not paged')
@utils.arg('--uuid', action='store_true',
           help='Include UUID in output')
def do_event_unsuppress(cc, args):
    """Unsuppress specified Event ID's."""
    field_list = ['alarm_id']
    # Prune input fields down to required/expected values
    data = dict((k, v) for (k, v) in vars(args).items()
                if k in field_list and not (v is None))

    if 'alarm_id' in data:
        event_suppression_update(cc, data, suppress=False)

    print_event_suppression_list(cc, args.nopaging, args.uuid)
@utils.arg('--nopaging', action='store_true',
           help='Output is not paged')
@utils.arg('--uuid', action='store_true',
           help='Include UUID in output')
def do_event_unsuppress_all(cc, args):
    """Unsuppress all Event ID's."""
    suppression_list = _event_suppression_list(cc, include_unsuppressed=True)

    for entry in suppression_list:
        if entry.suppression_status == 'suppressed':
            # BUG FIX: build a fresh single-op patch per entry.  The
            # original accumulated ops in one shared list, so every update
            # after the first carried all earlier patch operations too.
            patch = [dict(path='/suppression_status', value='unsuppressed',
                          op='replace')]
            print("Alarm ID: {} unsuppressed.".format(entry.alarm_id))
            cc.event_suppression.update(entry.uuid, patch)

    print_event_suppression_list(cc, args.nopaging, args.uuid)


# ---------------------------------------------------------------------------
# python-fmclient/fmclient/fmclient/v1/shell.py
# ---------------------------------------------------------------------------

#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#


from fmclient.common import utils

from fmclient.v1 import alarm_shell
from fmclient.v1 import event_log_shell
from fmclient.v1 import event_suppression_shell


COMMAND_MODULES = [
    alarm_shell,
    event_log_shell,
    event_suppression_shell,
]


def enhance_parser(parser, subparsers, cmd_mapper):
    """Take a basic (nonversioned) parser and enhance it with
    commands and options specific for this version of API.

    :param parser: top level parser
    :param subparsers: top level parser's subparsers collection where
                       subcommands will go
    """
    for command_module in COMMAND_MODULES:
        utils.define_commands_from_module(subparsers, command_module,
                                          cmd_mapper)
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT

import setuptools

# Workaround for http://bugs.python.org/issue15881#msg170215: on
# python < 2.7.4, lazily importing `pbr` can break setuptools when
# other modules have registered atexit handlers, so import
# multiprocessing up front (it may legitimately be absent).
try:
    import multiprocessing  # noqa
except ImportError:
    pass

setuptools.setup(
    setup_requires=['pbr>=2.0.0'],
    pbr=True)