Add pagination support

Set a limit on how many items are returned from a request.
Allow maxQueryLimit to be configurable.
Return all requests as JSON elements rather than top-level JSON arrays.
Add merge_metrics flag.

Change-Id: I00a4e2b911241ff934bd3dc9cc5c5caf19e905fb
This commit is contained in:
Deklan Dieterly 2015-02-25 11:03:20 -07:00
parent a1d26e4535
commit 180245a5b7
57 changed files with 1046 additions and 429 deletions

View File

@ -37,6 +37,8 @@ public class ApiConfig extends Configuration {
public String metricsTopic = "metrics";
@NotEmpty
public String eventsTopic = "events";
@NotNull
public int maxQueryLimit;
@NotEmpty
public String alarmStateTransitionsTopic = "alarm-state-transitions";

View File

@ -150,7 +150,7 @@ public class AlarmDefinitionService {
// Have to get information about the Alarms before they are deleted. They will be deleted
// by the database as a cascade delete from the Alarm Definition delete
final List<Alarm> alarms = alarmRepo.find(tenantId, alarmDefId, null, null, null, null);
final List<Alarm> alarms = alarmRepo.find(tenantId, alarmDefId, null, null, null, null, 1, false);
final Map<String, Map<String, AlarmSubExpression>> alarmSubExpressions =
alarmRepo.findAlarmSubExpressionsForAlarmDefinition(alarmDefId);

View File

@ -1,3 +1,16 @@
/*
* Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.domain.model.alarm;
import java.util.List;
@ -16,8 +29,8 @@ public interface AlarmRepo {
/**
* Returns alarms for the given criteria.
*/
List<Alarm> find(String tenantId, String alarmDefId, String metricName,
Map<String, String> metricDimensions, AlarmState state, String offset);
List<Alarm> find(String tenantId, String alarmDefId, String metricName, Map<String,
String> metricDimensions, AlarmState state, String offset, int limit, boolean enforceLimit);
/**
* @throws EntityNotFoundException if an alarm cannot be found for the {@code id}

View File

@ -48,7 +48,7 @@ public interface AlarmDefinitionRepo {
* Returns alarms for the given criteria.
*/
List<AlarmDefinition> find(String tenantId, String name, Map<String, String> dimensions,
String offset);
String offset, int limit);
/**
* @throws EntityNotFoundException if an alarm cannot be found for the {@code alarmDefId}

View File

@ -93,6 +93,10 @@ public class AlarmStateHistory extends AbstractEntity {
return true;
}
public void setId(String id) {
this.id = id;
}
public String getAlarmId() {
return alarmId;
}

View File

@ -28,11 +28,11 @@ public interface AlarmStateHistoryRepo {
/**
* @throws EntityNotFoundException if an alarm cannot be found for the {@code alarmId}
*/
List<AlarmStateHistory> findById(String tenantId, String alarmId, String offset) throws Exception;
List<AlarmStateHistory> findById(String tenantId, String alarmId, String offset, int limit) throws Exception;
/**
* Finds AlarmStateHistory for the given criteria.
*/
List<AlarmStateHistory> find(String tenantId, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime, @Nullable String offset) throws Exception;
DateTime startTime, @Nullable DateTime endTime, @Nullable String offset, int limit) throws Exception;
}

View File

@ -1,3 +1,16 @@
/*
* Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.domain.model.common;
import java.util.ArrayList;
@ -5,7 +18,7 @@ import java.util.List;
public class Paged {
public static final int LIMIT = 50;
public static final int LIMIT = 10000;
public List<Link> links = new ArrayList<>();

View File

@ -26,6 +26,6 @@ public interface MeasurementRepo {
/**
* Finds measurements for the given criteria.
*/
List<Measurements> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime, @Nullable String offset) throws Exception;
List<Measurements> find(String tenantId, String name, Map<String, String> dimensions, DateTime startTime, @Nullable DateTime endTime, @Nullable String offset,
int limit, Boolean mergeMetricsFlag) throws Exception;
}

View File

@ -26,6 +26,6 @@ public interface MetricDefinitionRepo {
* Finds metrics for the given criteria.
*/
List<MetricDefinition> find(String tenantId, String name, Map<String, String> dimensions,
String offset)
String offset, int limit)
throws Exception;
}

View File

@ -46,5 +46,5 @@ public interface NotificationMethodRepo {
NotificationMethod update(String tenantId, String notificationMethodId, String name,
NotificationMethodType type, String address);
List<NotificationMethod> find(String tenantId, String offset);
List<NotificationMethod> find(String tenantId, String offset, int limit);
}

View File

@ -27,6 +27,7 @@ public interface StatisticRepo {
* Finds statistics for the given criteria.
*/
List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime, List<String> statistics, int period)
DateTime startTime, @Nullable DateTime endTime, List<String> statistics,
int period, String offset, int limit, Boolean mergeMetricsFlag)
throws Exception;
}

View File

@ -18,10 +18,13 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import monasca.common.model.domain.common.AbstractEntity;
/**
* Encapsulates a metric measurements.
*/
public class Statistics {
public class Statistics extends AbstractEntity {
private String name;
private Map<String, String> dimensions;
private List<String> columns;
@ -76,6 +79,10 @@ public class Statistics {
return true;
}
public void setId(String id) {
this.id = id;
}
public void addStatistics(List<Object> statistics) {
this.statistics.add(statistics);
}

View File

@ -29,6 +29,7 @@ import monasca.api.domain.model.measurement.MeasurementRepo;
import monasca.api.domain.model.metric.MetricDefinitionRepo;
import monasca.api.domain.model.notificationmethod.NotificationMethodRepo;
import monasca.api.domain.model.statistic.StatisticRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.infrastructure.persistence.influxdb.InfluxV8AlarmStateHistoryRepo;
import monasca.api.infrastructure.persistence.influxdb.InfluxV8MeasurementRepo;
import monasca.api.infrastructure.persistence.influxdb.InfluxV8MetricDefinitionRepo;
@ -38,6 +39,7 @@ import monasca.api.infrastructure.persistence.influxdb.InfluxV9MeasurementRepo;
import monasca.api.infrastructure.persistence.influxdb.InfluxV9MetricDefinitionRepo;
import monasca.api.infrastructure.persistence.influxdb.InfluxV9RepoReader;
import monasca.api.infrastructure.persistence.influxdb.InfluxV9StatisticRepo;
import monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils;
import monasca.api.infrastructure.persistence.mysql.AlarmDefinitionMySqlRepoImpl;
import monasca.api.infrastructure.persistence.mysql.AlarmMySqlRepoImpl;
import monasca.api.infrastructure.persistence.mysql.NotificationMethodMySqlRepoImpl;
@ -93,6 +95,8 @@ public class InfrastructureModule extends AbstractModule {
} else if (config.influxDB.getVersion().trim().equalsIgnoreCase(INFLUXDB_V9)) {
bind(PersistUtils.class).in(Singleton.class);
bind(InfluxV9Utils.class).in(Singleton.class);
bind(InfluxV9RepoReader.class).in(Singleton.class);
bind(AlarmStateHistoryRepo.class).to(InfluxV9AlarmStateHistoryRepo.class)

View File

@ -0,0 +1,66 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.infrastructure.persistence;
import com.google.inject.Inject;
import monasca.api.ApiConfig;
public class PersistUtils {

  /** Fallback cap used when no configuration is supplied (no-arg constructor). */
  private static final int DEFAULT_MAX_QUERY_LIMIT = 10000;

  // Upper bound on the number of rows any single query may return.
  private final int maxQueryLimit;

  @Inject
  public PersistUtils(ApiConfig config) {
    this.maxQueryLimit = config.maxQueryLimit;
  }

  public PersistUtils(int maxQueryLimit) {
    this.maxQueryLimit = maxQueryLimit;
  }

  public PersistUtils() {
    this.maxQueryLimit = DEFAULT_MAX_QUERY_LIMIT;
  }

  /**
   * Parses the client-supplied limit query parameter and clamps it to the
   * configured maximum.
   *
   * @param limit the raw limit parameter; may be {@code null} or empty, in
   *        which case the configured maximum is returned
   * @return the effective limit: {@code min(parsed value, maxQueryLimit)}
   * @throws IllegalArgumentException if {@code limit} is not a positive integer
   */
  public int getLimit(String limit) {
    if (limit == null || limit.isEmpty()) {
      return this.maxQueryLimit;
    }
    final int limitInt;
    try {
      limitInt = Integer.parseInt(limit);
    } catch (NumberFormatException e) {
      // Chain the cause so the original parse failure is not lost.
      throw new IllegalArgumentException(
          String.format("Found invalid Limit: '%1$s'. Limit must be an integer.", limit), e);
    }
    // A zero or negative limit is nonsensical for pagination; reject it
    // explicitly instead of passing it through to the persistence layer.
    if (limitInt <= 0) {
      throw new IllegalArgumentException(
          String.format("Found invalid Limit: '%1$s'. Limit must be a positive integer.", limit));
    }
    return Math.min(limitInt, this.maxQueryLimit);
  }
}

View File

@ -22,17 +22,21 @@ public final class SubAlarmDefinitionQueries {
private SubAlarmDefinitionQueries() {}
public static String buildJoinClauseFor(Map<String, String> dimensions) {
StringBuilder sbJoin = null;
StringBuilder sbJoin = new StringBuilder();
if (dimensions != null) {
sbJoin = new StringBuilder();
for (int i = 0; i < dimensions.size(); i++) {
sbJoin.append(" inner join sub_alarm_definition_dimension d").append(i).append(" on d").append(i)
.append(".dimension_name = :dname").append(i).append(" and d").append(i)
.append("" + ".value = :dvalue").append(i).append(" and dim.sub_alarm_definition_id = d")
.append(".value = :dvalue").append(i).append(" and dim.sub_alarm_definition_id = d")
.append(i).append(".sub_alarm_definition_id");
}
}
return sbJoin == null ? "" : sbJoin.toString();
return sbJoin.toString();
}
}

View File

@ -27,9 +27,6 @@ import org.influxdb.dto.Serie;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import org.skife.jdbi.v2.util.StringMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -44,7 +41,6 @@ import monasca.common.model.alarm.AlarmTransitionSubAlarm;
import monasca.common.model.metric.MetricDefinition;
import monasca.api.domain.model.alarmstatehistory.AlarmStateHistory;
import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo;
import monasca.api.infrastructure.persistence.DimensionQueries;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV8Utils.buildAlarmsPart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV8Utils.findAlarmIds;
@ -78,7 +74,11 @@ public class InfluxV8AlarmStateHistoryRepo implements AlarmStateHistoryRepo {
}
@Override
public List<AlarmStateHistory> findById(String tenantId, String alarmId, String offset) throws Exception {
public List<AlarmStateHistory> findById(String tenantId, String alarmId,
String offset, int limit) throws Exception {
// Limit is not implemented for Influxdb V8.
// InfluxDB orders queries by time stamp desc by default.
String query = buildQueryForFindById(tenantId, alarmId, offset);
return queryInfluxDBForAlarmStateHistory(query);
@ -93,7 +93,9 @@ public class InfluxV8AlarmStateHistoryRepo implements AlarmStateHistoryRepo {
@Override
public List<AlarmStateHistory> find(String tenantId, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime, String offset) throws Exception {
DateTime startTime, @Nullable DateTime endTime, String offset, int limit) throws Exception {
// Limit is not implemented for Influxdb V8.
List<String> alarmIdList = findAlarmIds(this.mysql, tenantId, dimensions);

View File

@ -65,11 +65,15 @@ public class InfluxV8MeasurementRepo implements MeasurementRepo {
}
@Override
public List<Measurements> find(String tenantId, String name,
Map<String, String> dimensions, DateTime startTime,
@Nullable DateTime endTime, @Nullable String offset)
public List<Measurements> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime, @Nullable String offset,
int limit, Boolean mergeMetricsFlag)
throws Exception {
// mergeMetricsFlag is not implemented for Influxdb V8.
// Limit is not implemented for Influxdb V8.
String serieNameRegex = buildSerieNameRegex(tenantId, config.region, name, dimensions);
String timePart = InfluxV8Utils.WhereClauseBuilder.buildTimePart(startTime, endTime);

View File

@ -51,7 +51,9 @@ public class InfluxV8MetricDefinitionRepo implements MetricDefinitionRepo {
@Override
public List<MetricDefinition> find(String tenantId, String name, Map<String, String> dimensions,
String offset) throws Exception {
String offset, int limit) throws Exception {
// Limit is not implemented for Influxdb V8.
String serieNameRegex = buildSerieNameRegex(tenantId, config.region, name, dimensions);

View File

@ -57,9 +57,14 @@ public class InfluxV8StatisticRepo implements StatisticRepo {
@Override
public List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime,
List<String> statistics, int period)
List<String> statistics, int period, String offset, int limit,
Boolean mergeMetricsFlag)
throws Exception {
// mergeMetricsFlag is not implemented for Influxdb V8.
// Limit is not implemented for Influxdb V8.
String serieNameRegex = buildSerieNameRegex(tenantId, config.region, name, dimensions);
String statsPart = buildStatsPart(statistics);
String timePart = InfluxV8Utils.WhereClauseBuilder.buildTimePart(startTime, endTime);
@ -87,7 +92,7 @@ public class InfluxV8StatisticRepo implements StatisticRepo {
private List<Statistics> buildStatisticsList(List<String> statistics, List<Serie> result)
throws Exception {
List<Statistics> statisticsList = new LinkedList<Statistics>();
List<Statistics> statisticsList = new LinkedList<>();
for (Serie serie : result) {
InfluxV8Utils.SerieNameDecoder serieNameDecoder;
@ -104,7 +109,7 @@ public class InfluxV8StatisticRepo implements StatisticRepo {
colNamesList.add(0, "timestamp");
statistic.setColumns(colNamesList);
statistic.setDimensions(serieNameDecoder.getDimensions());
List<List<Object>> valObjArryArry = new LinkedList<List<Object>>();
List<List<Object>> valObjArryArry = new LinkedList<>();
statistic.setStatistics(valObjArryArry);
for (Map<String, Object> row : serie.getRows()) {
List<Object> valObjArry = new ArrayList<>();

View File

@ -269,10 +269,14 @@ final class InfluxV8Utils {
public static List<String> findAlarmIds(DBI mysql, String tenantId,
Map<String, String> dimensions) {
final String FIND_ALARMS_SQL = "select distinct a.id from alarm as a " +
"join alarm_definition as ad on a.alarm_definition_id=ad.id " +
"%s " +
"where ad.tenant_id = :tenantId and ad.deleted_at is NULL order by ad.created_at";
final String
FIND_ALARMS_SQL = "select distinct a.id "
+ "from alarm as a "
+ "join alarm_definition as ad on a.alarm_definition_id = ad.id "
+ "%s "
+ "where ad.tenant_id = :tenantId and ad.deleted_at is NULL "
+ "order by ad.created_at";
List<String> alarmIdList = null;
try (Handle h = mysql.open()) {

View File

@ -43,8 +43,6 @@ import monasca.common.model.alarm.AlarmState;
import monasca.common.model.alarm.AlarmTransitionSubAlarm;
import monasca.common.model.metric.MetricDefinition;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV8Utils.WhereClauseBuilder.buildTimePart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV8Utils.buildAlarmsPart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV8Utils.findAlarmIds;
public class InfluxV9AlarmStateHistoryRepo implements AlarmStateHistoryRepo {
@ -56,6 +54,7 @@ public class InfluxV9AlarmStateHistoryRepo implements AlarmStateHistoryRepo {
private final ApiConfig config;
private final String region;
private final InfluxV9RepoReader influxV9RepoReader;
private final InfluxV9Utils influxV9Utils;
private final ObjectMapper objectMapper = new ObjectMapper();
private final SimpleDateFormat simpleDateFormat =
@ -70,23 +69,30 @@ public class InfluxV9AlarmStateHistoryRepo implements AlarmStateHistoryRepo {
@Inject
public InfluxV9AlarmStateHistoryRepo(@Named("mysql") DBI mysql,
ApiConfig config,
InfluxV9RepoReader influxV9RepoReader) {
InfluxV9RepoReader influxV9RepoReader,
InfluxV9Utils influxV9Utils) {
this.mysql = mysql;
this.config = config;
this.region = config.region;
this.influxV9RepoReader = influxV9RepoReader;
this.influxV9Utils = influxV9Utils;
}
@Override
public List<AlarmStateHistory> findById(String tenantId, String alarmId, String offset)
public List<AlarmStateHistory> findById(String tenantId, String alarmId, String offset,
int limit)
throws Exception {
String q = String.format("select alarm_id, metrics, old_state, new_state, sub_alarms, reason, reason_data "
+ "from alarm_state_history where tenant_id = '%1$s' and alarm_id = '%2$s'",
InfluxV8Utils.SQLSanitizer.sanitize(tenantId),
InfluxV8Utils.SQLSanitizer.sanitize(alarmId));
String q = String.format("select alarm_id, metrics, old_state, new_state, reason, reason_data "
+ "from alarm_state_history "
+ "where %1$s %2$s %3$s %4$s",
this.influxV9Utils.tenantIdPart(tenantId),
this.influxV9Utils.alarmIdPart(alarmId),
this.influxV9Utils.timeOffsetPart(offset),
this.influxV9Utils.limitPart(limit));
logger.debug("Alarm state history query: {}", q);
@ -104,7 +110,7 @@ public class InfluxV9AlarmStateHistoryRepo implements AlarmStateHistoryRepo {
@Override
public List<AlarmStateHistory> find(String tenantId, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime,
@Nullable String offset) throws Exception {
@Nullable String offset, int limit) throws Exception {
List<String> alarmIdList = findAlarmIds(this.mysql, tenantId, dimensions);
@ -112,12 +118,15 @@ public class InfluxV9AlarmStateHistoryRepo implements AlarmStateHistoryRepo {
return new ArrayList<>();
}
String timePart = buildTimePart(startTime, endTime);
String alarmsPart = buildAlarmsPart(alarmIdList);
String q = String.format("select alarm_id, metrics, old_state, new_state, sub_alarms, reason, reason_data "
+ "from alarm_state_history where tenant_id = '%1$s' %2$s %3$s",
InfluxV8Utils.SQLSanitizer.sanitize(tenantId), timePart, alarmsPart);
String q = String.format("select alarm_id, metrics, old_state, new_state, reason, reason_data "
+ "from alarm_state_history "
+ "where %1$s %2$s %3$s %4$s %5$s",
this.influxV9Utils.tenantIdPart(tenantId),
this.influxV9Utils.startTimeEndTimePart(startTime, endTime),
this.influxV9Utils.alarmIdsPart(alarmIdList),
this.influxV9Utils.timeOffsetPart(offset),
this.influxV9Utils.limitPart(limit));
logger.debug("Alarm state history list query: {}", q);

View File

@ -15,12 +15,14 @@ package monasca.api.infrastructure.persistence.influxdb;
import com.google.inject.Inject;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@ -32,46 +34,42 @@ import monasca.api.ApiConfig;
import monasca.api.domain.model.measurement.MeasurementRepo;
import monasca.api.domain.model.measurement.Measurements;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.dimPart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.endTimePart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.namePart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.regionPart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.startTimePart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.tenantIdPart;
public class InfluxV9MeasurementRepo implements MeasurementRepo {
private static final Logger logger = LoggerFactory
.getLogger(InfluxV9MeasurementRepo.class);
private final static TypeReference VALUE_META_TYPE = new TypeReference<Map<String, String>>() {};
private final ApiConfig config;
private final String region;
private final InfluxV9RepoReader influxV9RepoReader;
private final InfluxV9Utils influxV9Utils;
private final InfluxV9MetricDefinitionRepo influxV9MetricDefinitionRepo;
private final ObjectMapper objectMapper = new ObjectMapper();
@Inject
public InfluxV9MeasurementRepo(ApiConfig config,
InfluxV9RepoReader influxV9RepoReader) {
InfluxV9RepoReader influxV9RepoReader,
InfluxV9Utils influxV9Utils,
InfluxV9MetricDefinitionRepo influxV9MetricDefinitionRepo) {
this.config = config;
this.region = config.region;
this.influxV9RepoReader = influxV9RepoReader;
this.influxV9Utils = influxV9Utils;
this.influxV9MetricDefinitionRepo = influxV9MetricDefinitionRepo;
}
@Override
public List<Measurements> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime,
@Nullable String offset) throws Exception {
@Nullable String offset, int limit, Boolean mergeMetricsFlag) throws Exception {
String q = buildQuery(tenantId, name, dimensions, startTime, endTime,
offset, limit, mergeMetricsFlag);
String q = String.format("select value %1$s where %2$s %3$s %4$s %5$s %6$s", namePart(name),
tenantIdPart(tenantId), regionPart(this.region), startTimePart(startTime),
dimPart(dimensions), endTimePart(endTime));
logger.debug("Measurements query: {}", q);
String r = this.influxV9RepoReader.read(q);
@ -84,6 +82,51 @@ public class InfluxV9MeasurementRepo implements MeasurementRepo {
return measurementsList;
}
/**
 * Builds the InfluxDB 0.9 select statement for a measurements query.
 *
 * When mergeMetricsFlag is true, measurements from all matching series are
 * returned in one result; otherwise the query is restricted to a single
 * series ("slimit 1") and it is an error if the criteria match more than one
 * series. All clause fragments are produced by InfluxV9Utils helpers.
 *
 * @param tenantId tenant scoping the query
 * @param name metric name filter
 * @param dimensions dimension name/value filters; may be null per callers
 * @param startTime inclusive start of the time range
 * @param endTime end of the time range; may be null (open-ended)
 * @param offset opaque time-based pagination offset; may be null
 * @param limit maximum number of rows to return
 * @param mergeMetricsFlag whether to merge measurements across series
 * @return the fully-formatted query string
 * @throws Exception if the single-series check fails to execute
 * @throws IllegalArgumentException if merging is off and multiple series match
 */
private String buildQuery(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, DateTime endTime, String offset, int limit,
Boolean mergeMetricsFlag) throws Exception {
String q;
if (Boolean.TRUE.equals(mergeMetricsFlag)) {
// Had to use * to handle value meta. If we select valueMeta and it does not exist, then error.
q = String.format("select * %1$s "
+ "where %2$s %3$s %4$s %5$s %6$s %7$s %8$s",
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.tenantIdPart(tenantId),
this.influxV9Utils.regionPart(this.region),
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
this.influxV9Utils.timeOffsetPart(offset),
this.influxV9Utils.limitPart(limit));
} else {
// Merging disabled: the criteria must identify at most one series.
if (!this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
throw new IllegalArgumentException(this.influxV9Utils.getMultipleMetricsErrorMsg());
}
// Had to use * to handle value meta. If we select valueMeta and it does not exist, then error.
// "slimit 1" caps the result to one series; groupByPart() keeps series distinct.
q = String.format("select * %1$s "
+ "where %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s slimit 1",
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.tenantIdPart(tenantId),
this.influxV9Utils.regionPart(this.region),
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
this.influxV9Utils.timeOffsetPart(offset),
this.influxV9Utils.groupByPart(),
this.influxV9Utils.limitPart(limit));
}
logger.debug("Measurements query: {}", q);
return q;
}
private List<Measurements> measurementsList(Series series) {
@ -93,15 +136,13 @@ public class InfluxV9MeasurementRepo implements MeasurementRepo {
for (Serie serie : series.getSeries()) {
// Influxdb 0.9.0 does not return dimensions at this time.
Measurements measurements = new Measurements(serie.getName(), new HashMap());
Measurements measurements = new Measurements(serie.getName(), serie.getTags());
for (String[] values : serie.getValues()) {
// TODO: Really support valueMeta
final Map<String, String> valueMeta = new HashMap<>();
measurements.addMeasurement(new Object[]{values[0], values[0], Double.parseDouble(values[1]),
valueMeta});
measurements.addMeasurement(
new Object[]{values[0], values[0], Double.parseDouble(values[1]),
getValueMeta(values)});
}
measurementsList.add(measurements);
@ -111,4 +152,22 @@ public class InfluxV9MeasurementRepo implements MeasurementRepo {
return measurementsList;
}
/**
 * Deserializes the value-meta JSON carried in the third column of a
 * measurement row into a string map.
 *
 * @param values one row of serie values; index 2, when present and non-empty,
 *        holds the value-meta as a JSON object string
 * @return the parsed value-meta, or an empty map when the column is absent,
 *         empty, or fails to parse (the failure is logged, not rethrown)
 */
private Map<String, String> getValueMeta(String[] values) {
  // Diamond operator instead of the raw type to avoid an unchecked assignment.
  Map<String, String> valueMeta = new HashMap<>();
  if (values.length >= 3 && values[2] != null && !values[2].isEmpty()) {
    try {
      valueMeta =
          this.objectMapper.readValue(values[2], VALUE_META_TYPE);
    } catch (IOException e) {
      // Best-effort: bad metadata should not fail the whole measurement read.
      logger.error("Failed to parse value metadata: {}", values[2], e);
    }
  }
  return valueMeta;
}
}

View File

@ -29,10 +29,6 @@ import monasca.api.ApiConfig;
import monasca.api.domain.model.metric.MetricDefinitionRepo;
import monasca.common.model.metric.MetricDefinition;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.dimPart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.namePart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.regionPart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.tenantIdPart;
public class InfluxV9MetricDefinitionRepo implements MetricDefinitionRepo {
@ -40,6 +36,7 @@ public class InfluxV9MetricDefinitionRepo implements MetricDefinitionRepo {
private final ApiConfig config;
private final InfluxV9RepoReader influxV9RepoReader;
private final InfluxV9Utils influxV9Utils;
private final String region;
private final ObjectMapper objectMapper = new ObjectMapper();
@ -47,23 +44,25 @@ public class InfluxV9MetricDefinitionRepo implements MetricDefinitionRepo {
@Inject
public InfluxV9MetricDefinitionRepo(ApiConfig config,
InfluxV9RepoReader influxV9RepoReader) {
InfluxV9RepoReader influxV9RepoReader,
InfluxV9Utils influxV9Utils) {
this.config = config;
this.region = config.region;
this.influxV9RepoReader = influxV9RepoReader;
this.influxV9Utils = influxV9Utils;
}
@Override
public List<MetricDefinition> find(String tenantId, String name,
Map<String, String> dimensions,
String offset) throws Exception {
boolean isAtMostOneSeries(String tenantId, String name, Map<String, String> dimensions)
throws Exception {
String
q =
String.format("show series %1$s where %2$s %3$s %4$s", namePart(name), tenantIdPart(
tenantId),
regionPart(this.region), dimPart(dimensions));
// Set limit to 2. We only care if we get 0, 1, or 2 results back.
String q = String.format("show series %1$s "
+ "where %2$s %3$s %4$s limit 2",
this.influxV9Utils.namePart(name, false),
this.influxV9Utils.tenantIdPart(tenantId),
this.influxV9Utils.regionPart(this.region),
this.influxV9Utils.dimPart(dimensions));
logger.debug("Metric definition query: {}", q);
@ -71,24 +70,63 @@ public class InfluxV9MetricDefinitionRepo implements MetricDefinitionRepo {
Series series = this.objectMapper.readValue(r, Series.class);
List<MetricDefinition> metricDefinitionList = metricDefinitionList(series);
List<MetricDefinition> metricDefinitionList = metricDefinitionList(series, 0);
logger.debug("Found {} metric definitions matching query", metricDefinitionList.size());
return metricDefinitionList.size() > 1 ? false : true;
}
/**
 * Finds metric definitions matching the given criteria via a "show series"
 * query, with limit/offset pagination.
 *
 * The offset is translated to a numeric start index (startIndex) and each
 * returned definition is assigned a sequential id starting from it, so the
 * next page's offset can be derived from the last returned id.
 *
 * @param tenantId tenant scoping the query
 * @param name metric name filter; may be null per the interface contract
 * @param dimensions dimension name/value filters
 * @param offset pagination offset from a previous page; may be null
 * @param limit maximum number of definitions to return
 * @return the matching metric definitions, ids set for pagination
 * @throws Exception on query or JSON deserialization failure
 */
@Override
public List<MetricDefinition> find(String tenantId, String name,
Map<String, String> dimensions,
String offset, int limit) throws Exception {
int startIndex = this.influxV9Utils.startIndex(offset);
String
q =
String.format("show series %1$s where %2$s %3$s %4$s %5$s %6$s",
this.influxV9Utils.namePart(name, false),
this.influxV9Utils.tenantIdPart(tenantId),
this.influxV9Utils.regionPart(this.region),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.limitPart(limit),
this.influxV9Utils.offsetPart(startIndex));
logger.debug("Metric definition query: {}", q);
String r = this.influxV9RepoReader.read(q);
Series series = this.objectMapper.readValue(r, Series.class);
List<MetricDefinition> metricDefinitionList = metricDefinitionList(series, startIndex);
logger.debug("Found {} metric definitions matching query", metricDefinitionList.size());
return metricDefinitionList;
}
private List<MetricDefinition> metricDefinitionList(Series series) {
private List<MetricDefinition> metricDefinitionList(Series series, int startIndex) {
List<MetricDefinition> metricDefinitionList = new ArrayList<>();
if (!series.isEmpty()) {
int index = startIndex;
for (Serie serie : series.getSeries()) {
for (String[] values : serie.getValues()) {
metricDefinitionList.add(new MetricDefinition(serie.getName(), dims(values, serie.getColumns())));
MetricDefinition m = new MetricDefinition(serie.getName(), dims(values, serie.getColumns()));
m.setId(String.valueOf(index++));
metricDefinitionList.add(m);
}
}
}

View File

@ -84,6 +84,8 @@ public class InfluxV9RepoReader {
int rc = response.getStatusLine().getStatusCode();
logger.debug("Received {} status code from influx database", rc);
if (rc != HttpStatus.SC_OK) {
HttpEntity entity = response.getEntity();

View File

@ -22,7 +22,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@ -33,12 +32,6 @@ import monasca.api.ApiConfig;
import monasca.api.domain.model.statistic.StatisticRepo;
import monasca.api.domain.model.statistic.Statistics;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.dimPart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.endTimePart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.namePart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.regionPart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.startTimePart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils.tenantIdPart;
public class InfluxV9StatisticRepo implements StatisticRepo{
@ -48,30 +41,34 @@ public class InfluxV9StatisticRepo implements StatisticRepo{
private final ApiConfig config;
private final String region;
private final InfluxV9RepoReader influxV9RepoReader;
private final InfluxV9Utils influxV9Utils;
private final InfluxV9MetricDefinitionRepo influxV9MetricDefinitionRepo;
private final ObjectMapper objectMapper = new ObjectMapper();
@Inject
public InfluxV9StatisticRepo(ApiConfig config,
InfluxV9RepoReader influxV9RepoReader) {
InfluxV9RepoReader influxV9RepoReader,
InfluxV9Utils influxV9Utils,
InfluxV9MetricDefinitionRepo influxV9MetricDefinitionRepo) {
this.config = config;
this.region = config.region;
this.influxV9RepoReader = influxV9RepoReader;
this.influxV9Utils = influxV9Utils;
this.influxV9MetricDefinitionRepo = influxV9MetricDefinitionRepo;
}
@Override
public List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime,
List<String> statistics, int period) throws Exception {
List<String> statistics, int period, String offset, int limit,
Boolean mergeMetricsFlag) throws Exception {
String q = String.format("select %1$s %2$s where %3$s %4$s %5$s %6$s %7$s %8$s",
funcPart(statistics), namePart(name), tenantIdPart(tenantId),
regionPart(this.region), startTimePart(startTime), dimPart(dimensions),
endTimePart(endTime), periodPart(period));
logger.debug("Measurements query: {}", q);
String q = buildQuery(tenantId, name, dimensions, startTime, endTime,
statistics, period, offset, limit, mergeMetricsFlag);
String r = this.influxV9RepoReader.read(q);
@ -85,6 +82,55 @@ public class InfluxV9StatisticRepo implements StatisticRepo{
}
/**
 * Builds the InfluxDB select statement for a statistics query.
 *
 * When mergeMetricsFlag is true, all matching series are merged into a single
 * result series. Otherwise we first verify that at most one series matches the
 * search criteria and restrict the query to a single series ("slimit 1");
 * if several series match, the caller is told to refine the search or pass
 * merge_metrics=true.
 *
 * @param tenantId         tenant whose metrics are queried (required)
 * @param name             metric name (required)
 * @param dimensions       optional dimension name/value filters
 * @param startTime        start of the query interval
 * @param endTime          end of the query interval; may be null
 * @param statistics       aggregate functions to apply (avg, min, max, ...)
 * @param period           grouping period in seconds
 * @param offset           pagination offset (timestamp of the last returned point)
 * @param limit            maximum number of points to return per page
 * @param mergeMetricsFlag whether to merge all matching series into one
 * @return the InfluxDB query string
 * @throws Exception if a required query part is missing, or if multiple series
 *         match without merge_metrics=true (IllegalArgumentException)
 */
private String buildQuery(String tenantId, String name, Map<String, String> dimensions,
                          DateTime startTime, DateTime endTime, List<String> statistics,
                          int period, String offset, int limit, Boolean mergeMetricsFlag)
    throws Exception {

  final boolean merge = Boolean.TRUE.equals(mergeMetricsFlag);

  if (!merge
      && !this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
    throw new IllegalArgumentException(this.influxV9Utils.getMultipleMetricsErrorMsg());
  }

  // The merged and non-merged variants differ only in the group-by clause and
  // in the trailing "slimit 1" that caps the non-merged query to one series.
  String q = String.format("select %1$s %2$s "
                           + "where %3$s %4$s %5$s %6$s %7$s %8$s %9$s %10$s%11$s",
                           funcPart(statistics),
                           this.influxV9Utils.namePart(name, true),
                           this.influxV9Utils.tenantIdPart(tenantId),
                           this.influxV9Utils.regionPart(this.region),
                           this.influxV9Utils.startTimePart(startTime),
                           this.influxV9Utils.dimPart(dimensions),
                           this.influxV9Utils.endTimePart(endTime),
                           this.influxV9Utils.timeOffsetPart(offset),
                           merge ? this.influxV9Utils.periodPart(period)
                                 : this.influxV9Utils.periodPartWithGroupBy(period),
                           this.influxV9Utils.limitPart(limit),
                           merge ? "" : " slimit 1");

  logger.debug("Statistics query: {}", q);

  return q;
}
private List<Statistics> statisticslist(Series series) {
@ -94,7 +140,7 @@ public class InfluxV9StatisticRepo implements StatisticRepo{
for (Serie serie : series.getSeries()) {
Statistics statistics = new Statistics(serie.getName(), new HashMap<String, String>(),
Statistics statistics = new Statistics(serie.getName(), serie.getTags(),
Arrays.asList(translateNames(serie.getColumns())));
for (Object[] values : serie.getValues()) {
@ -125,12 +171,14 @@ public class InfluxV9StatisticRepo implements StatisticRepo{
private String funcPart(List<String> statistics) {
StringBuilder sb = new StringBuilder();
for (String stat : statistics) {
if (sb.length() != 0) {
sb.append(",");
}
if (stat.trim().toLowerCase().equals("avg")) {
sb.append(" mean(value)");
sb.append("mean(value)");
} else {
sb.append(String.format("%1$s(value)", stat));
}
@ -138,8 +186,4 @@ public class InfluxV9StatisticRepo implements StatisticRepo{
return sb.toString();
}
private String periodPart(int period) {
return period >= 1 ? String.format("group by time(%1$ds)", period) : "";
}
}

View File

@ -1,49 +1,111 @@
/*
* Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.infrastructure.persistence.influxdb;
import com.google.inject.Inject;
import org.joda.time.DateTime;
import org.joda.time.format.ISODateTimeFormat;
import java.util.List;
import java.util.Map;
import monasca.api.infrastructure.persistence.PersistUtils;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV8Utils.SQLSanitizer.sanitize;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV8Utils.WhereClauseBuilder.buildTimePart;
import static monasca.api.infrastructure.persistence.influxdb.InfluxV8Utils.buildAlarmsPart;
public class InfluxV9Utils {
public static String namePart(String name) throws Exception {
private static final String
MULTIPLE_METRICS_ERROR_MSG =
"Found multiple metrics matching search criteria. "
+ "Please refine your search criteria using additional dimensions "
+ "or specify merge_metrics=true as query param to combine "
+ "all metrics into a single series.";
if (name != null && !name.isEmpty()) {
sanitize(name);
return String.format("from \"%1$s\"", name);
} else {
private final PersistUtils persistUtils;
@Inject
public InfluxV9Utils(PersistUtils persistUtils) {
this.persistUtils = persistUtils;
}
/** Returns the clause that groups results by every tag, keeping each series separate. */
public String groupByPart() {
return " group by *";
}
/**
 * Builds the "from" clause for a metric name.
 *
 * @param name       metric name; sanitized before interpolation
 * @param isRequired when true, a null/empty name is an error
 * @return the from clause, or "" when the name is absent and optional
 * @throws Exception if the name is required but null/empty, or fails sanitizing
 */
public String namePart(String name, boolean isRequired) throws Exception {
  boolean absent = (name == null) || name.isEmpty();
  if (absent) {
    if (isRequired) {
      throw new Exception(String.format("Found null or empty name: %1$s", name));
    }
    return "";
  }
  return String.format(" from \"%1$s\"", sanitize(name));
}
public static String tenantIdPart(String tenantId) throws Exception {
public String tenantIdPart(String tenantId) throws Exception {
if (tenantId == null || tenantId.isEmpty()) {
throw new Exception(String.format("Found null or empty tenant id: %1$s", tenantId));
}
sanitize(tenantId);
return "tenant_id=" + "'" + tenantId + "'";
return " tenant_id=" + "'" + sanitize(tenantId) + "'";
}
public static String regionPart(String region) throws Exception {
/**
 * Builds the alarm id filter clause; empty when no alarm id was supplied.
 * NOTE(review): alarmId is interpolated without sanitize() — confirm callers
 * only pass trusted/validated ids.
 */
public String alarmIdPart(String alarmId) {
  boolean absent = (alarmId == null) || alarmId.isEmpty();
  return absent ? "" : String.format(" and alarm_id='%1$s'", alarmId);
}
/**
 * Builds the time-based pagination clause: resume strictly after the
 * timestamp of the last point already returned. Empty when no offset given.
 */
public String timeOffsetPart(String offset) {
  if (offset != null && !offset.isEmpty()) {
    return String.format(" and time > '%1$s'", offset);
  }
  return "";
}
public String regionPart(String region) throws Exception {
if (region == null || region.isEmpty()) {
throw new Exception(String.format("Found null or empty region: %1$s", region));
}
sanitize(region);
return " and region=" + "'" + region + "'";
return " and region=" + "'" + sanitize(region) + "'";
}
public static String dimPart(Map<String, String> dims) throws Exception {
public String dimPart(Map<String, String> dims) throws Exception {
StringBuilder sb = new StringBuilder();
@ -51,9 +113,7 @@ public class InfluxV9Utils {
for (String k : dims.keySet()) {
String v = dims.get(k);
if (k != null && !k.isEmpty() && v != null && !v.isEmpty()) {
sanitize(k);
sanitize(v);
sb.append(" and " + k + "=" + "'" + v + "'");
sb.append(" and " + sanitize(k) + "=" + "'" + sanitize(v) + "'");
}
}
}
@ -61,11 +121,65 @@ public class InfluxV9Utils {
return sb.toString();
}
public static String startTimePart (DateTime startTime) {
return startTime != null ? " and time > " + "'" + ISODateTimeFormat.dateTime().print(startTime) + "'" : "";
public String startTimePart(DateTime startTime) {
return startTime != null ? " and time > " + "'" + ISODateTimeFormat.dateTime().print(startTime)
+ "'" : "";
}
public static String endTimePart (DateTime endTime) {
return endTime != null ? " and time < " + "'" + ISODateTimeFormat.dateTime().print(endTime) + "'" : "";
public String endTimePart(DateTime endTime) {
return endTime != null ? " and time < " + "'" + ISODateTimeFormat.dateTime().print(endTime)
+ "'" : "";
}
/**
 * Builds the limit clause. Requests one row beyond the page size so the
 * caller can tell whether a "next" link is needed; the extra row is
 * truncated before the response is returned.
 */
public String limitPart(int limit) {
  int requested = limit + 1;
  return " limit " + requested;
}
/** Builds the row-offset clause used for index-based pagination. */
public String offsetPart(int startIndex) {
  return " offset " + startIndex;
}
/**
 * Converts a string offset into the next row index to fetch.
 * A null/empty offset means start from the beginning.
 *
 * @throws NumberFormatException if offset is non-empty but not an integer
 */
public int startIndex(String offset) {
  if (offset == null || offset.isEmpty()) {
    return 0;
  }
  // Rows up to and including 'offset' were already returned; resume after it.
  int lastReturned = Integer.parseInt(offset);
  return lastReturned + 1;
}
/** Builds the combined start/end time-range clause; delegates to the V8 helper. */
public String startTimeEndTimePart(DateTime startTime, DateTime endTime) {
return buildTimePart(startTime, endTime);
}
/** Builds the clause matching any of the given alarm ids; delegates to the V8 helper. */
public String alarmIdsPart(List<String> alarmIdList) {
return buildAlarmsPart(alarmIdList);
}
/**
 * Builds the time-bucket clause for per-series statistics. Grouping by '*'
 * keeps each series separate; fill(0) pads empty buckets with zeros.
 * A non-positive period falls back to 300-second buckets.
 */
public String periodPartWithGroupBy(int period) {
  int seconds = (period > 0) ? period : 300;
  return String.format(" group by time(%1$ds), * fill(0)", seconds);
}
/**
 * Builds the time-bucket clause for merged statistics: buckets by time only,
 * so all matching series collapse into one; fill(0) pads empty buckets.
 * A non-positive period falls back to 300-second buckets.
 */
public String periodPart(int period) {
  int seconds = (period > 0) ? period : 300;
  return String.format(" group by time(%1$ds) fill(0)", seconds);
}
/** Returns the user-facing message shown when a query matches several metrics without merge_metrics=true. */
public String getMultipleMetricsErrorMsg() {
return MULTIPLE_METRICS_ERROR_MSG;
}
}

View File

@ -13,6 +13,9 @@
*/
package monasca.api.infrastructure.persistence.influxdb;
import java.util.HashMap;
import java.util.Map;
public class Series {
public SeriesElement[] results;
@ -23,6 +26,15 @@ public class Series {
return this.results[0].series == null;
}
/** Number of series in the first result element; 0 for an empty payload (series == null). */
int getSeriesLength() {
  return isEmpty() ? 0 : this.results[0].series.length;
}
Serie[] getSeries() {
return this.results[0].series;
@ -44,14 +56,20 @@ class SeriesElement {
class Serie {
public String name;
public String[] columns;
public String[][] values;
// Initialize to defaults to avoid NPE.
public String name = "";
Map<String, String> tags = new HashMap();
public String[] columns = new String[0];
public String[][] values = new String[0][0];
/** Series (measurement) name; defaults to "" to avoid NPEs on empty payloads. */
public String getName() {
return name;
}
/**
 * Tag name/value pairs for this series.
 * NOTE(review): raw {@code Map} return type — the backing field is
 * {@code Map<String, String>}; consider tightening the signature.
 */
public Map getTags() {
return tags;
}
public String[] getColumns() {
return columns;
}

View File

@ -34,7 +34,7 @@ import org.skife.jdbi.v2.StatementContext;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;
import monasca.api.domain.model.common.Paged;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.common.model.alarm.AggregateFunction;
import monasca.common.model.alarm.AlarmOperator;
import monasca.common.model.alarm.AlarmState;
@ -48,6 +48,7 @@ import monasca.api.infrastructure.persistence.SubAlarmDefinitionQueries;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
/**
* Alarm repository implementation.
*/
@ -59,10 +60,12 @@ public class AlarmDefinitionMySqlRepoImpl implements AlarmDefinitionRepo {
+ "on sad.sub_alarm_definition_id = sa.id where sa.alarm_definition_id = :alarmDefId";
private final DBI db;
private final PersistUtils persistUtils;
@Inject
public AlarmDefinitionMySqlRepoImpl(@Named("mysql") DBI db) {
public AlarmDefinitionMySqlRepoImpl(@Named("mysql") DBI db, PersistUtils persistUtils) {
this.db = db;
this.persistUtils = persistUtils;
}
@Override
@ -135,35 +138,43 @@ public class AlarmDefinitionMySqlRepoImpl implements AlarmDefinitionRepo {
@SuppressWarnings("unchecked")
@Override
public List<AlarmDefinition> find(String tenantId, String name,
Map<String, String> dimensions, String offset) {
Map<String, String> dimensions, String offset, int limit) {
try (Handle h = db.open()) {
String query = "SELECT alarmdefinition.*, "
+ "GROUP_CONCAT(alarm_action.alarm_state) AS states, GROUP_CONCAT(alarm_action.action_id) AS notificationIds FROM (select distinct alarm_definition.id,tenant_id,"
+ "name,description, severity, expression, match_by, actions_enabled, alarm_definition.created_at, alarm_definition.updated_at,"
+ " alarm_definition.deleted_at FROM alarm_definition LEFT OUTER JOIN sub_alarm_definition sub ON alarm_definition.id = sub.alarm_definition_id"
+ " LEFT OUTER JOIN sub_alarm_definition_dimension dim ON sub.id = dim.sub_alarm_definition_id%s WHERE tenant_id = :tenantId"
+ " AND deleted_at IS NULL %s ORDER BY %s alarm_definition.created_at %s) AS alarmdefinition LEFT OUTER JOIN alarm_action ON alarmdefinition.id=alarm_action.alarm_definition_id"
+ " GROUP BY alarmdefinition.id";
String query =
" SELECT t.id, t.tenant_id, t.name, t.description, t.expression, t.severity, t.match_by,"
+ " t.actions_enabled, t.created_at, t.updated_at, t.deleted_at, "
+ " GROUP_CONCAT(aa.alarm_state) AS states, "
+ " GROUP_CONCAT(aa.action_id) AS notificationIds "
+ "FROM (SELECT distinct ad.id, ad.tenant_id, ad.name, ad.description, ad.expression,"
+ " ad.severity, ad.match_by, ad.actions_enabled, ad.created_at, "
+ " ad.updated_at, ad.deleted_at "
+ " FROM alarm_definition AS ad "
+ " LEFT OUTER JOIN sub_alarm_definition AS sad ON ad.id = sad.alarm_definition_id "
+ " LEFT OUTER JOIN sub_alarm_definition_dimension AS dim ON sad.id = dim.sub_alarm_definition_id %1$s "
+ " WHERE ad.tenant_id = :tenantId AND ad.deleted_at IS NULL %2$s limit :limit) AS t "
+ "LEFT OUTER JOIN alarm_action AS aa ON t.id = aa.alarm_definition_id "
+ "GROUP BY t.id ORDER BY t.id, t.created_at";
StringBuilder sbWhere = new StringBuilder();
if (name != null) {
sbWhere.append(" and alarm_definition.name = :name");
sbWhere.append(" and ad.name = :name");
}
if (offset != null) {
sbWhere.append(" and alarm_definition.id > :offset");
sbWhere.append(" and ad.id > :offset");
}
String orderBy = offset != null ? "alarm_definition.id," : "";
String limit = offset != null ? " limit :limit" : "";
String sql = String.format(query,
SubAlarmDefinitionQueries.buildJoinClauseFor(dimensions), sbWhere, orderBy,
limit);
Query<?> q = h.createQuery(sql).bind("tenantId", tenantId);
SubAlarmDefinitionQueries.buildJoinClauseFor(dimensions), sbWhere);
Query<?> q = h.createQuery(sql);
q.bind("tenantId", tenantId);
if (name != null) {
q.bind("name", name);
@ -171,8 +182,10 @@ public class AlarmDefinitionMySqlRepoImpl implements AlarmDefinitionRepo {
if (offset != null) {
q.bind("offset", offset);
q.bind("limit", Paged.LIMIT);
}
q.bind("limit", limit + 1);
q.registerMapper(new AlarmDefinitionMapper());
q = q.mapTo(AlarmDefinition.class);
DimensionQueries.bindDimensionsToQuery(q, dimensions);
@ -185,12 +198,12 @@ public class AlarmDefinitionMySqlRepoImpl implements AlarmDefinitionRepo {
public AlarmDefinition findById(String tenantId, String alarmDefId) {
try (Handle h = db.open()) {
String query = "SELECT alarm_definition.id, alarm_definition.tenant_id, alarm_definition.name, alarm_definition.description,"
+ "alarm_definition.expression, alarm_definition.severity, alarm_definition.match_by, alarm_definition.actions_enabled,"
String query = "SELECT alarm_definition.id, alarm_definition.tenant_id, alarm_definition.name, alarm_definition.description, "
+ "alarm_definition.expression, alarm_definition.severity, alarm_definition.match_by, alarm_definition.actions_enabled, "
+" alarm_definition.created_at, alarm_definition.updated_at, alarm_definition.deleted_at, "
+ "GROUP_CONCAT(alarm_action.action_id) AS notificationIds,group_concat(alarm_action.alarm_state) AS states "
+ "FROM alarm_definition LEFT OUTER JOIN alarm_action ON alarm_definition.id=alarm_action.alarm_definition_id "
+ " WHERE alarm_definition.tenant_id=:tenantId AND alarm_definition.id=:alarmDefId AND alarm_definition.deleted_at"
+ " WHERE alarm_definition.tenant_id=:tenantId AND alarm_definition.id=:alarmDefId AND alarm_definition.deleted_at "
+ " IS NULL GROUP BY alarm_definition.id";
Query<?> q = h.createQuery(query);
@ -342,11 +355,12 @@ public class AlarmDefinitionMySqlRepoImpl implements AlarmDefinitionRepo {
}
private static class AlarmDefinitionMapper implements ResultSetMapper<AlarmDefinition> {
private static final Splitter COMMA_SPLITTER = Splitter.on(',')
.omitEmptyStrings().trimResults();
public AlarmDefinition map(int index, ResultSet r, StatementContext ctx)
throws SQLException {
private static final Splitter
COMMA_SPLITTER =
Splitter.on(',').omitEmptyStrings().trimResults();
public AlarmDefinition map(int index, ResultSet r, StatementContext ctx) throws SQLException {
String notificationIds = r.getString("notificationIds");
String states = r.getString("states");
String matchBy = r.getString("match_by");
@ -360,29 +374,26 @@ public class AlarmDefinitionMySqlRepoImpl implements AlarmDefinitionRepo {
int stateAndActionIndex = 0;
for (String singleState : state) {
if (singleState.equals(AlarmState.UNDETERMINED.name()))
{
if (singleState.equals(AlarmState.UNDETERMINED.name())) {
undeterminedActionIds.add(notifications.get(stateAndActionIndex));
}
if (singleState.equals(AlarmState.OK.name()))
{
if (singleState.equals(AlarmState.OK.name())) {
okActionIds.add(notifications.get(stateAndActionIndex));
}
if (singleState.equals(AlarmState.ALARM.name()))
{
if (singleState.equals(AlarmState.ALARM.name())) {
alarmActionIds.add(notifications.get(stateAndActionIndex));
}
stateAndActionIndex++;
}
return new AlarmDefinition(r.getString("id"), r.getString("name"),
r.getString("description"), r.getString("severity"),
r.getString("expression"), match, r.getBoolean("actions_enabled"),
alarmActionIds, okActionIds, undeterminedActionIds);
}
return new AlarmDefinition(r.getString("id"), r.getString("name"), r.getString("description"),
r.getString("severity"), r.getString("expression"), match,
r.getBoolean("actions_enabled"), alarmActionIds, okActionIds,
undeterminedActionIds);
}
private List<String> splitStringIntoList(String commaDelimitedString) {
if(commaDelimitedString==null)
{
if (commaDelimitedString == null) {
return new ArrayList<String>();
}
Iterable<String> split = COMMA_SPLITTER.split(commaDelimitedString);

View File

@ -16,8 +16,8 @@ package monasca.api.infrastructure.persistence.mysql;
import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.alarm.Alarm;
import monasca.api.domain.model.alarm.AlarmRepo;
import monasca.api.domain.model.common.Paged;
import monasca.api.infrastructure.persistence.DimensionQueries;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.common.model.alarm.AlarmState;
import monasca.common.model.alarm.AlarmSubExpression;
import monasca.common.model.metric.MetricDefinition;
@ -45,6 +45,8 @@ public class AlarmMySqlRepoImpl implements AlarmRepo {
private static final Logger logger = LoggerFactory.getLogger(AlarmMySqlRepoImpl.class);
private final DBI db;
private final PersistUtils persistUtils;
private static final String ALARM_SQL =
"select ad.id as alarm_definition_id, ad.severity, ad.name as alarm_definition_name, "
+ "a.id, a.state, "
@ -58,8 +60,9 @@ public class AlarmMySqlRepoImpl implements AlarmRepo {
+ "where ad.tenant_id = :tenantId and ad.deleted_at is null %s order by a.id %s";
@Inject
public AlarmMySqlRepoImpl(@Named("mysql") DBI db) {
public AlarmMySqlRepoImpl(@Named("mysql") DBI db, PersistUtils persistUtils) {
this.db = db;
this.persistUtils = persistUtils;
}
private void buildJoinClauseFor(Map<String, String> dimensions, StringBuilder sbJoin) {
@ -88,7 +91,8 @@ public class AlarmMySqlRepoImpl implements AlarmRepo {
@Override
public List<Alarm> find(String tenantId, String alarmDefId, String metricName,
Map<String, String> metricDimensions, AlarmState state, String offset) {
Map<String, String> metricDimensions, AlarmState state, String offset,
int limit, boolean enforceLimit) {
try (Handle h = db.open()) {
@ -105,8 +109,11 @@ public class AlarmMySqlRepoImpl implements AlarmRepo {
+ "inner join (select distinct id from metric_definition "
+ " where name = :metricName) as md "
+ "on md.id = mdd.metric_definition_id ");
buildJoinClauseFor(metricDimensions, sbWhere);
sbWhere.append(")");
}
if (state != null) {
sbWhere.append(" and a.state = :state");
@ -114,9 +121,13 @@ public class AlarmMySqlRepoImpl implements AlarmRepo {
if (offset != null) {
sbWhere.append(" and a.id > :offset");
}
String limit = offset != null ? " limit :limit" : "";
String sql = String.format(ALARM_SQL, sbWhere, limit);
String limitPart = "";
if (enforceLimit) {
limitPart = " limit :limit";
}
String sql = String.format(ALARM_SQL, sbWhere, limitPart);
final Query<Map<String, Object>> q = h.createQuery(sql).bind("tenantId", tenantId);
if (alarmDefId != null) {
@ -130,7 +141,10 @@ public class AlarmMySqlRepoImpl implements AlarmRepo {
}
if (offset != null) {
q.bind("offset", offset);
q.bind("limit", Paged.LIMIT);
}
if (enforceLimit) {
q.bind("limit", limit + 1);
}
DimensionQueries.bindDimensionsToQuery(q, metricDimensions);

View File

@ -21,15 +21,16 @@ import javax.inject.Named;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import monasca.api.domain.exception.EntityExistsException;
import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.common.Paged;
import monasca.api.domain.model.notificationmethod.NotificationMethod;
import monasca.api.domain.model.notificationmethod.NotificationMethodRepo;
import monasca.api.domain.model.notificationmethod.NotificationMethodType;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.common.persistence.BeanMapper;
/**
@ -39,10 +40,12 @@ public class NotificationMethodMySqlRepoImpl implements NotificationMethodRepo {
private static final Logger LOG = LoggerFactory
.getLogger(NotificationMethodMySqlRepoImpl.class);
private final DBI db;
private final PersistUtils persistUtils;
@Inject
public NotificationMethodMySqlRepoImpl(@Named("mysql") DBI db) {
public NotificationMethodMySqlRepoImpl(@Named("mysql") DBI db, PersistUtils persistUtils) {
this.db = db;
this.persistUtils = persistUtils;
}
@Override
@ -94,22 +97,34 @@ public class NotificationMethodMySqlRepoImpl implements NotificationMethodRepo {
}
@Override
public List<NotificationMethod> find(String tenantId, String offset) {
public List<NotificationMethod> find(String tenantId, String offset, int limit) {
try (Handle h = db.open()) {
String rawQuery =
" SELECT nm.id, nm.tenant_id, nm.name, nm.type, nm.address, nm.created_at, nm.updated_at "
+ "FROM notification_method as nm "
+ "WHERE tenant_id = :tenantId %1$s order by nm.id asc limit :limit";
String offsetPart = "";
if (offset != null) {
return h.createQuery(
"select * from notification_method where tenant_id = :tenantId and id > :offset order by id asc limit :limit")
.bind("tenantId", tenantId).bind("offset", offset).bind("limit", Paged.LIMIT)
.map(new BeanMapper<NotificationMethod>(NotificationMethod.class)).list();
} else {
return h.createQuery("select * from notification_method where tenant_id = :tenantId")
.bind("tenantId", tenantId)
.map(new BeanMapper<NotificationMethod>(NotificationMethod.class)).list();
offsetPart = "and nm.id > :offset";
}
String query = String.format(rawQuery, offsetPart);
Query<?> q = h.createQuery(query);
q.bind("tenantId", tenantId);
if (offset != null) {
q.bind("offset", offset);
}
q.bind("limit", limit + 1);
return (List<NotificationMethod>) q.map(new BeanMapper<NotificationMethod>(NotificationMethod.class)).list();
}
}

View File

@ -61,9 +61,9 @@ public class AlarmStateHistoryVerticaRepoImpl implements AlarmStateHistoryRepo {
}
@Override
public List<AlarmStateHistory> findById(String tenantId, String alarmId, String offset) {
public List<AlarmStateHistory> findById(String tenantId, String alarmId, String offset, int limit) {
// Todo. Use offset for pagination.
// Todo. Use offset and limit for pagination.
try (Handle h = vertica.open()) {
return h
@ -76,9 +76,9 @@ public class AlarmStateHistoryVerticaRepoImpl implements AlarmStateHistoryRepo {
@Override
public List<AlarmStateHistory> find(String tenantId, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime, @Nullable String offset) {
DateTime startTime, @Nullable DateTime endTime, @Nullable String offset, int limit) {
// Todo. Use offset for pagination.
// Todo. Use offset and limit for pagination.
List<String> alarmIds = null;

View File

@ -56,10 +56,13 @@ public class MeasurementVerticaRepoImpl implements MeasurementRepo {
}
@Override
public List<Measurements> find(String tenantId, String name,
Map<String, String> dimensions, DateTime startTime, @Nullable DateTime endTime, @Nullable String offset) {
public List<Measurements> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime, @Nullable String offset, int limit,
Boolean mergeMetricsFlag) {
// Todo. Use offset for pagination.
// Todo. Use mergeMetricsFlag.
// Todo. Use offset and limit for pagination.
try (Handle h = db.open()) {
// Build sql

View File

@ -49,8 +49,9 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
@Override
public List<MetricDefinition> find(String tenantId, String name, Map<String, String> dimensions,
String offset) {
// Todo. Use offset for pagination.
String offset, int limit) {
// Todo. Use offset and limit for pagination.
try (Handle h = db.open()) {
// Build sql

View File

@ -56,7 +56,13 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
@Override
public List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, DateTime endTime, List<String> statistics, int period) {
DateTime startTime, DateTime endTime, List<String> statistics,
int period, String offset, int limit, Boolean mergeMetricsFlag) {
// Todo. Use mergeMetricsFlag.
// Todo. Use offset and limit for pagination.
List<Statistics> listStats = new ArrayList<>();
List<String> copyStatistics = createColumns(statistics);

View File

@ -48,6 +48,7 @@ import monasca.api.app.validation.AlarmValidation;
import monasca.api.app.validation.Validation;
import monasca.api.domain.model.alarmdefinition.AlarmDefinition;
import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.resource.annotation.PATCH;
import monasca.common.model.alarm.AlarmExpression;
@ -58,13 +59,17 @@ import monasca.common.model.alarm.AlarmExpression;
public class AlarmDefinitionResource {
private final AlarmDefinitionService service;
private final AlarmDefinitionRepo repo;
private final PersistUtils persistUtils;
public final static String ALARM_DEFINITIONS = "alarm-definitions";
public final static String ALARM_DEFINITIONS_PATH = "/v2.0/" + ALARM_DEFINITIONS;
@Inject
public AlarmDefinitionResource(AlarmDefinitionService service, AlarmDefinitionRepo repo) {
public AlarmDefinitionResource(AlarmDefinitionService service,
AlarmDefinitionRepo repo,
PersistUtils persistUtils) {
this.service = service;
this.repo = repo;
this.persistUtils = persistUtils;
}
@POST
@ -88,12 +93,16 @@ public class AlarmDefinitionResource {
public Object list(@Context UriInfo uriInfo,
@HeaderParam("X-Tenant-Id") String tenantId, @QueryParam("name") String name,
@QueryParam("dimensions") String dimensionsStr,
@QueryParam("offset") String offset) {
@QueryParam("offset") String offset,
@QueryParam("limit") String limit){
Map<String, String> dimensions =
Strings.isNullOrEmpty(dimensionsStr) ? null : Validation
.parseAndValidateDimensions(dimensionsStr);
return Links.paginate(offset, Links.hydrate(repo.find(tenantId, name, dimensions, offset), uriInfo), uriInfo);
return Links.paginate(this.persistUtils.getLimit(limit),
Links.hydrate(repo.find(tenantId, name, dimensions, offset,
this.persistUtils.getLimit(limit)), uriInfo),
uriInfo);
}
@GET

View File

@ -45,6 +45,7 @@ import monasca.api.app.validation.Validation;
import monasca.api.domain.model.alarm.Alarm;
import monasca.api.domain.model.alarm.AlarmRepo;
import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.resource.annotation.PATCH;
import monasca.common.model.alarm.AlarmState;
@ -55,14 +56,17 @@ import monasca.common.model.alarm.AlarmState;
public class AlarmResource {
private final AlarmService service;
private final AlarmRepo repo;
private final PersistUtils persistUtils;
private final AlarmStateHistoryRepo stateHistoryRepo;
@Inject
public AlarmResource(AlarmService service, AlarmRepo repo,
AlarmStateHistoryRepo stateHistoryRepo) {
AlarmStateHistoryRepo stateHistoryRepo,
PersistUtils persistUtils) {
this.service = service;
this.repo = repo;
this.stateHistoryRepo = stateHistoryRepo;
this.persistUtils = persistUtils;
}
@DELETE
@ -95,9 +99,12 @@ public class AlarmResource {
@Produces(MediaType.APPLICATION_JSON)
public Object getStateHistory(@Context UriInfo uriInfo,
@HeaderParam("X-Tenant-Id") String tenantId, @PathParam("alarm_id") String alarmId,
@QueryParam("offset") String offset)
@QueryParam("offset") String offset,
@QueryParam("limit") String limit)
throws Exception {
return Links.paginate(offset, stateHistoryRepo.findById(tenantId, alarmId, offset), uriInfo);
return Links.paginate(this.persistUtils.getLimit(limit),
stateHistoryRepo.findById(tenantId, alarmId, offset,
this.persistUtils.getLimit(limit)), uriInfo);
}
@GET
@ -108,7 +115,8 @@ public class AlarmResource {
@Context UriInfo uriInfo,
@HeaderParam("X-Tenant-Id") String tenantId, @QueryParam("dimensions") String dimensionsStr,
@QueryParam("start_time") String startTimeStr, @QueryParam("end_time") String endTimeStr,
@QueryParam("offset") String offset)
@QueryParam("offset") String offset,
@QueryParam("limit") String limit)
throws Exception {
// Validate query parameters
@ -120,7 +128,9 @@ public class AlarmResource {
Strings.isNullOrEmpty(dimensionsStr) ? null : Validation
.parseAndValidateDimensions(dimensionsStr);
return Links.paginate(offset, stateHistoryRepo.find(tenantId, dimensions, startTime, endTime, offset), uriInfo);
return Links.paginate(this.persistUtils.getLimit(limit),
stateHistoryRepo.find(tenantId, dimensions, startTime,
endTime, offset, this.persistUtils.getLimit(limit)), uriInfo);
}
@GET
@ -131,16 +141,18 @@ public class AlarmResource {
@QueryParam("metric_name") String metricName,
@QueryParam("metric_dimensions") String metricDimensionsStr,
@QueryParam("state") AlarmState state,
@QueryParam("offset") String offset) throws Exception {
@QueryParam("offset") String offset,
@QueryParam("limit") String limit)
throws Exception {
Map<String, String> metricDimensions =
Strings.isNullOrEmpty(metricDimensionsStr) ? null : Validation
.parseAndValidateNameAndDimensions(metricName, metricDimensionsStr);
final List<Alarm> alarms = repo.find(tenantId, alarmDefId, metricName, metricDimensions, state,
offset);
offset, this.persistUtils.getLimit(limit), true);
for (final Alarm alarm : alarms) {
Links.hydrate(alarm.getAlarmDefinition(), uriInfo, AlarmDefinitionResource.ALARM_DEFINITIONS_PATH);
}
return Links.paginate(offset, Links.hydrate(alarms, uriInfo), uriInfo);
return Links.paginate(this.persistUtils.getLimit(limit), Links.hydrate(alarms, uriInfo), uriInfo);
}
@PATCH

View File

@ -13,11 +13,8 @@
*/
package monasca.api.resource;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.UriInfo;
@ -26,6 +23,7 @@ import com.google.common.base.Preconditions;
import monasca.api.ApiConfig;
import monasca.api.domain.model.common.Paged;
import monasca.api.domain.model.measurement.Measurements;
import monasca.api.domain.model.statistic.Statistics;
import monasca.common.model.domain.common.AbstractEntity;
import monasca.api.domain.model.common.Link;
import monasca.api.domain.model.common.Linked;
@ -142,106 +140,203 @@ public final class Links {
}
public static Object paginate(String offset, List<? extends AbstractEntity> elements,
UriInfo uriInfo) {
if (offset != null) {
/**
* This method handles the case that the elements list size is one greater than the
* limit. The next link will be created automatically.
*
* This method also handles the case that the element size is the limit. The next
* link will not be created.
*
* The convention is for methods that query the DB to request limit + 1 elements.
*
* Only limit number of elements will be returned.
*
* @param limit
* @param elements
* @param uriInfo
* @return
*/
public static Object paginate(int limit, List<? extends AbstractEntity> elements, UriInfo uriInfo) {
// Check for paging turned off. Happens if maxQueryLimit is not set. Used for V8 compatibility.
if (limit == 0) {
Paged paged = new Paged();
Link selfLink = new Link();
selfLink.rel = "self";
selfLink.href = uriInfo.getRequestUri().toString();
paged.links.add(selfLink);
paged.elements = elements != null ? elements : new ArrayList<>();
return paged;
}
if (elements != null) {
if (elements.size() >= Paged.LIMIT) {
Link nextLink = new Link();
nextLink.rel = "next";
// Create a new URL with the new offset.
nextLink.href =
uriInfo.getAbsolutePath().toString() + "?offset=" + elements.get(elements.size() - 1)
.getId();
// Add the query parms back to the URL without the original offset.
for (String parmKey : uriInfo.getQueryParameters().keySet()) {
if (!parmKey.equalsIgnoreCase("offset")) {
List<String> parmValList = uriInfo.getQueryParameters().get(parmKey);
for (String parmVal : parmValList) {
nextLink.href += "&" + parmKey + "=" + parmVal;
}
}
}
paged.links.add(nextLink);
}
Paged paged = new Paged();
paged.links.add(getSelfLink(uriInfo));
if (elements != null) {
if (elements.size() > limit) {
String offset = elements.get(limit - 1).getId();
paged.links.add(getNextLink(offset, uriInfo));
// Truncate the list. Normally this will just truncate one extra element.
elements = elements.subList(0, limit);
}
paged.elements = elements != null ? elements : new ArrayList();
return paged;
paged.elements = elements;
} else {
Paged paged = new Paged();
paged.elements = elements;
return paged;
paged.elements = new ArrayList();
}
return paged;
}
public static Object paginateMeasurements(String offset, Measurements measurements,
UriInfo uriInfo) throws UnsupportedEncodingException {
if (offset != null) {
public static Object paginateMeasurements(int limit, List<Measurements> elements, UriInfo uriInfo) {
// Check for paging turned off. Happens if maxQueryLimit is not set. Used for V8 compatibility.
if (limit == 0) {
Paged paged = new Paged();
Link selfLink = new Link();
selfLink.rel = "self";
selfLink.href = uriInfo.getRequestUri().toString();
paged.links.add(selfLink);
if (measurements.getMeasurements().size() >= Paged.LIMIT) {
Link nextLink = new Link();
nextLink.rel = "next";
// Create a new URL with the new offset.
nextLink.href = uriInfo.getAbsolutePath().toString() + "?offset=" + measurements.getId();
// Add the query parms back to the URL without the original offset and dimensions.
for (String parmKey : uriInfo.getQueryParameters().keySet()) {
if (!parmKey.equalsIgnoreCase("offset") && !parmKey.equalsIgnoreCase("dimensions")) {
List<String> parmValList = uriInfo.getQueryParameters().get(parmKey);
for (String parmVal : parmValList) {
nextLink.href += "&" + parmKey + "=" + parmVal;
}
}
}
// Add the dimensions for this particular measurement.
Map<String, String> dimensionsMap = measurements.getDimensions();
if (dimensionsMap != null && !dimensionsMap.isEmpty()) {
nextLink.href += "&dimensions=";
boolean firstDimension = true;
for (String dimensionKey : dimensionsMap.keySet()) {
String dimensionVal = dimensionsMap.get(dimensionKey);
if (firstDimension) {
firstDimension = false;
} else {
nextLink.href += URLEncoder.encode(",", "UTF-8");
}
nextLink.href += dimensionKey + URLEncoder.encode(":", "UTF-8") + dimensionVal;
}
}
paged.links.add(nextLink);
}
List<Measurements> measurementsList = new ArrayList();
measurementsList.add(measurements);
paged.elements = measurementsList;
paged.elements = elements != null ? elements : new ArrayList<>();
return paged;
}
Paged paged = new Paged();
paged.links.add(getSelfLink(uriInfo));
if (elements != null && !elements.isEmpty()) {
Measurements m = elements.get(0);
if (m != null) {
List<Object[]> l = m.getMeasurements();
if (l.size() > limit) {
String offset = (String) l.get(limit - 1)[0];
m.setId(offset);
paged.links.add(getNextLink(offset, uriInfo));
// Truncate the list. Normally this will just truncate one extra element.
l = l.subList(0, limit);
m.setMeasurements(l);
}
// Check if there are any elements.
if (l.size() > 0) {
// Set the id to the last date in the list.
m.setId((String) l.get(l.size() - 1)[0]);
}
paged.elements = elements;
} else {
paged.elements = new ArrayList<>();
}
} else {
return measurements;
paged.elements = new ArrayList();
}
return paged;
}
/**
 * Paginates a statistics query result.
 *
 * The convention is for methods that query the DB to request limit + 1 data
 * points; if more than limit points come back, the extra ones are truncated
 * and a "next" link is added whose offset is the timestamp of the last point
 * actually returned.
 *
 * NOTE(review): only the first Statistics element is inspected/truncated —
 * presumably results are collapsed into a single element when merge_metrics
 * is set; confirm against the repo implementations.
 *
 * @param limit maximum number of statistics rows to return; 0 disables paging
 *              (happens if maxQueryLimit is not set; used for V8 compatibility)
 * @param elements statistics query result; may be null or empty
 * @param uriInfo request URI used to build the self and next links
 * @return a Paged object holding at most limit statistics rows
 */
public static Object paginateStatistics(int limit, List<Statistics> elements, UriInfo uriInfo) {

  Paged paged = new Paged();

  // Paging turned off: return everything, with no links.
  if (limit == 0) {
    paged.elements = elements != null ? elements : new ArrayList<>();
    return paged;
  }

  paged.links.add(getSelfLink(uriInfo));

  if (elements == null || elements.isEmpty()) {
    paged.elements = new ArrayList<>();
    return paged;
  }

  Statistics statistics = elements.get(0);
  if (statistics == null) {
    paged.elements = new ArrayList<>();
    return paged;
  }

  List<List<Object>> rows = statistics.getStatistics();
  if (rows.size() > limit) {
    // An extra row beyond limit means there is a next page. The offset is the
    // timestamp (first column) of the last row that will be returned.
    String offset = (String) rows.get(limit - 1).get(0);
    statistics.setId(offset);
    paged.links.add(getNextLink(offset, uriInfo));
    // Truncate the list. Normally this just drops the one extra row.
    rows = rows.subList(0, limit);
    statistics.setStatistics(rows);
  }

  // Set the id to the timestamp of the last row, if any rows remain.
  if (!rows.isEmpty()) {
    statistics.setId((String) rows.get(rows.size() - 1).get(0));
  }

  paged.elements = elements;
  return paged;
}
/**
 * Builds the "self" link for the current request.
 *
 * @param uriInfo request URI information
 * @return a Link with rel "self" whose href is the full request URI
 */
private static Link getSelfLink(UriInfo uriInfo) {
  final Link link = new Link();
  link.rel = "self";
  link.href = String.valueOf(uriInfo.getRequestUri());
  return link;
}
/**
 * Builds the "next" link for the given offset.
 *
 * Recreates the request URL with the new offset, then re-appends every other
 * original query parameter. The original "offset" parameter is dropped so it
 * is not duplicated.
 *
 * NOTE(review): parameter values are re-appended without URL encoding, same
 * as the original code — assumes they are already encoded; confirm.
 *
 * @param offset id/timestamp of the last element returned on this page
 * @param uriInfo request URI information
 * @return a Link with rel "next" pointing at the following page
 */
private static Link getNextLink(String offset, UriInfo uriInfo) {
  Link nextLink = new Link();
  nextLink.rel = "next";
  // Create a new URL with the new offset.
  StringBuilder href = new StringBuilder(uriInfo.getAbsolutePath().toString())
      .append("?offset=").append(offset);
  // Add the query params back to the URL without the original offset.
  // Iterate entries so each parameter's value list is looked up only once.
  for (Map.Entry<String, List<String>> param : uriInfo.getQueryParameters().entrySet()) {
    if (!param.getKey().equalsIgnoreCase("offset")) {
      for (String value : param.getValue()) {
        href.append('&').append(param.getKey()).append('=').append(value);
      }
    }
  }
  nextLink.href = href.toString();
  return nextLink;
}
}

View File

@ -19,8 +19,6 @@ import com.codahale.metrics.annotation.Timed;
import org.joda.time.DateTime;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
@ -34,9 +32,8 @@ import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriInfo;
import monasca.api.app.validation.Validation;
import monasca.api.domain.model.common.Paged;
import monasca.api.domain.model.measurement.MeasurementRepo;
import monasca.api.domain.model.measurement.Measurements;
import monasca.api.infrastructure.persistence.PersistUtils;
/**
* Measurement resource implementation.
@ -45,10 +42,12 @@ import monasca.api.domain.model.measurement.Measurements;
public class MeasurementResource {
private final MeasurementRepo repo;
private final PersistUtils persistUtils;
@Inject
public MeasurementResource(MeasurementRepo repo) {
public MeasurementResource(MeasurementRepo repo, PersistUtils persistUtils) {
this.repo = repo;
this.persistUtils = persistUtils;
}
@GET
@ -57,7 +56,10 @@ public class MeasurementResource {
public Object get(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId,
@QueryParam("name") String name, @QueryParam("dimensions") String dimensionsStr,
@QueryParam("start_time") String startTimeStr,
@QueryParam("end_time") String endTimeStr, @QueryParam("offset") String offset)
@QueryParam("end_time") String endTimeStr,
@QueryParam("offset") String offset,
@QueryParam("limit") String limit,
@QueryParam("merge_metrics") Boolean mergeMetricsFlag)
throws Exception {
// Validate query parameters
@ -69,21 +71,9 @@ public class MeasurementResource {
Strings.isNullOrEmpty(dimensionsStr) ? null : Validation
.parseAndValidateNameAndDimensions(name, dimensionsStr);
List<Measurements> measurementsList = repo.find(tenantId, name, dimensions, startTime, endTime, offset);
List<Object> pagedList = new LinkedList();
return Links.paginateMeasurements(this.persistUtils.getLimit(limit),
repo.find(tenantId, name, dimensions, startTime, endTime,
offset, this.persistUtils.getLimit(limit), mergeMetricsFlag), uriInfo);
for (Measurements measurements : measurementsList) {
pagedList.add(Links.paginateMeasurements(offset, measurements, uriInfo));
}
if (offset != null) {
Paged paged = new Paged();
paged.elements = pagedList;
return paged;
} else {
Paged paged = new Paged();
paged.elements = pagedList;
return paged;
}
}
}

View File

@ -39,6 +39,7 @@ import monasca.api.app.MetricService;
import monasca.api.app.command.CreateMetricCommand;
import monasca.api.app.validation.Validation;
import monasca.api.domain.model.metric.MetricDefinitionRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.resource.exception.Exceptions;
import monasca.common.model.Services;
import monasca.common.model.metric.Metric;
@ -51,13 +52,16 @@ public class MetricResource {
private static final String MONITORING_DELEGATE_ROLE = "monitoring-delegate";
private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults();
private final MetricService service;
private final MetricService service;
private final MetricDefinitionRepo metricRepo;
private final PersistUtils persistUtils;
@Inject
public MetricResource(MetricService service, MetricDefinitionRepo metricRepo) {
public MetricResource(MetricService service, MetricDefinitionRepo metricRepo,
PersistUtils persistUtils) {
this.service = service;
this.metricRepo = metricRepo;
this.persistUtils = persistUtils;
}
@POST
@ -96,16 +100,18 @@ public class MetricResource {
@GET
@Timed
@Produces(MediaType.APPLICATION_JSON)
public Object getMetrics(@Context UriInfo uriInfo,
@HeaderParam("X-Tenant-Id") String tenantId,
@QueryParam("name") String name,
@QueryParam("dimensions") String dimensionsStr,
@QueryParam("offset") String offset) throws Exception {
public Object getMetrics(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId,
@QueryParam("name") String name,
@QueryParam("dimensions") String dimensionsStr,
@QueryParam("offset") String offset,
@QueryParam("limit") String limit)
throws Exception {
Map<String, String>
dimensions =
Strings.isNullOrEmpty(dimensionsStr) ? null : Validation
.parseAndValidateNameAndDimensions(name, dimensionsStr);
return Links.paginate(offset, metricRepo.find(tenantId, name, dimensions, offset), uriInfo);
return Links.paginate(this.persistUtils.getLimit(limit),
metricRepo.find(tenantId, name, dimensions, offset, this.persistUtils.getLimit(limit)), uriInfo);
}
}

View File

@ -37,6 +37,7 @@ import javax.ws.rs.core.UriInfo;
import monasca.api.app.command.CreateNotificationMethodCommand;
import monasca.api.domain.model.notificationmethod.NotificationMethod;
import monasca.api.domain.model.notificationmethod.NotificationMethodRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
/**
* Notification Method resource implementation.
@ -44,10 +45,12 @@ import monasca.api.domain.model.notificationmethod.NotificationMethodRepo;
@Path("/v2.0/notification-methods")
public class NotificationMethodResource {
private final NotificationMethodRepo repo;
private final PersistUtils persistUtils;
@Inject
public NotificationMethodResource(NotificationMethodRepo repo) {
public NotificationMethodResource(NotificationMethodRepo repo, PersistUtils persistUtils) {
this.repo = repo;
this.persistUtils = persistUtils;
}
@POST
@ -69,9 +72,14 @@ public class NotificationMethodResource {
@Timed
@Produces(MediaType.APPLICATION_JSON)
public Object list(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId,
@QueryParam("offset") String offset) {
@QueryParam("offset") String offset,
@QueryParam("limit") String limit)
{
return Links.paginate(offset, Links.hydrate(repo.find(tenantId, offset), uriInfo), uriInfo);
return Links.paginate(this.persistUtils.getLimit(limit),
Links.hydrate(repo.find(tenantId, offset,
this.persistUtils.getLimit(limit)), uriInfo),
uriInfo);
}

View File

@ -30,12 +30,13 @@ import javax.ws.rs.HeaderParam;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriInfo;
import monasca.api.app.validation.Validation;
import monasca.api.domain.model.common.Paged;
import monasca.api.domain.model.statistic.StatisticRepo;
import monasca.api.domain.model.statistic.Statistics;
import monasca.api.infrastructure.persistence.PersistUtils;
// import monasca.common.util.stats.Statistics;
@ -47,20 +48,29 @@ public class StatisticResource {
private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults();
private final StatisticRepo repo;
private final PersistUtils persistUtils;
@Inject
public StatisticResource(StatisticRepo repo) {
public StatisticResource(StatisticRepo repo, PersistUtils persistUtils) {
this.repo = repo;
this.persistUtils = persistUtils;
}
@GET
@Timed
@Produces(MediaType.APPLICATION_JSON)
public Object get(@HeaderParam("X-Tenant-Id") String tenantId,
@QueryParam("name") String name, @QueryParam("dimensions") String dimensionsStr,
@QueryParam("start_time") String startTimeStr, @QueryParam("end_time") String endTimeStr,
public Object get(@Context UriInfo uriInfo, @HeaderParam("X-Tenant-Id") String tenantId,
@QueryParam("name") String name,
@QueryParam("dimensions") String dimensionsStr,
@QueryParam("start_time") String startTimeStr,
@QueryParam("end_time") String endTimeStr,
@QueryParam("statistics") String statisticsStr,
@DefaultValue("300") @QueryParam("period") String periodStr) throws Exception {
@DefaultValue("300")
@QueryParam("period") String periodStr,
@QueryParam("offset") String offset,
@QueryParam("limit") String limit,
@QueryParam("merge_metrics") Boolean mergeMetricsFlag) throws Exception {
// Validate query parameters
DateTime startTime = Validation.parseAndValidateDate(startTimeStr, "start_time", true);
@ -74,11 +84,10 @@ public class StatisticResource {
Strings.isNullOrEmpty(dimensionsStr) ? null : Validation.parseAndValidateNameAndDimensions(
name, dimensionsStr);
List<Statistics> statisticsList =
repo.find(tenantId, name, dimensions, startTime, endTime, statistics, period);
Paged paged = new Paged();
paged.elements = statisticsList;
return paged;
return Links.paginateStatistics(this.persistUtils.getLimit(limit),
repo.find(tenantId, name, dimensions, startTime, endTime, statistics,
period, offset, this.persistUtils.getLimit(limit),
mergeMetricsFlag), uriInfo);
}
}

View File

@ -22,12 +22,14 @@ import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriInfo;
import monasca.api.domain.model.version.Version;
import monasca.api.domain.model.version.VersionRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
/**
* Version resource implementation.
@ -36,16 +38,23 @@ import monasca.api.domain.model.version.VersionRepo;
@Produces(MediaType.APPLICATION_JSON)
public class VersionResource {
private final VersionRepo repository;
private final PersistUtils persistUtils;
@Inject
public VersionResource(VersionRepo repository) {
public VersionResource(VersionRepo repository,
PersistUtils persistUtils) {
this.repository = repository;
this.persistUtils = persistUtils;
}
@GET
@Timed
public Object list(@Context UriInfo uriInfo) {
return Links.paginate(null, Links.hydrate(repository.find(), uriInfo), uriInfo);
public Object list(@Context UriInfo uriInfo,
@QueryParam("offset") String offset,
@QueryParam("limit") String limit) {
return Links.paginate(this.persistUtils.getLimit(limit),
Links.hydrate(repository.find(), uriInfo), uriInfo);
}
@GET

View File

@ -10,6 +10,9 @@ metricsTopic: metrics
# Topic for publishing domain events to
eventsTopic: events
# Maximum rows (Mysql) or points (Influxdb) to return when listing elements. Only set with Influxdb V9.
#maxQueryLimit: 10000
kafka:
brokerUris:
- 192.168.10.4:9092

View File

@ -38,6 +38,7 @@ import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Resources;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.common.model.alarm.AggregateFunction;
import monasca.common.model.alarm.AlarmOperator;
import monasca.common.model.alarm.AlarmSubExpression;
@ -61,7 +62,7 @@ public class AlarmDefinitionMySqlRepositoryImplTest {
handle = db.open();
handle
.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
repo = new AlarmDefinitionMySqlRepoImpl(db);
repo = new AlarmDefinitionMySqlRepoImpl(db, new PersistUtils());
alarmActions = new ArrayList<String>();
alarmActions.add("29387234");
@ -153,7 +154,7 @@ public class AlarmDefinitionMySqlRepositoryImplTest {
// Warning, this will truncate your mini-mon database
db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
handle = db.open();
repo = new AlarmDefinitionMySqlRepoImpl(db);
repo = new AlarmDefinitionMySqlRepoImpl(db, new PersistUtils());
beforeMethod();
List<String> oldSubAlarmIds = Arrays.asList("222");
@ -197,7 +198,7 @@ public class AlarmDefinitionMySqlRepositoryImplTest {
// Warning, this will truncate your mini-mon database
db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
handle = db.open();
repo = new AlarmDefinitionMySqlRepoImpl(db);
repo = new AlarmDefinitionMySqlRepoImpl(db, new PersistUtils());
beforeMethod();
assertEquals(
@ -220,7 +221,7 @@ public class AlarmDefinitionMySqlRepositoryImplTest {
// Warning, this will truncate your mini-mon database
db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
handle = db.open();
repo = new AlarmDefinitionMySqlRepoImpl(db);
repo = new AlarmDefinitionMySqlRepoImpl(db, new PersistUtils());
beforeMethod();
assertEquals(
@ -244,36 +245,36 @@ public class AlarmDefinitionMySqlRepositoryImplTest {
}
public void shouldFind() {
assertEquals(Arrays.asList(alarmDef_123, alarmDef_234), repo.find("bob", null, null, null));
assertEquals(Arrays.asList(alarmDef_123, alarmDef_234), repo.find("bob", null, null, null, 1));
// Make sure it still finds AlarmDefinitions with no notifications
handle.execute("delete from alarm_action");
alarmDef_123.setAlarmActions(new ArrayList<String>(0));
alarmDef_234.setAlarmActions(new ArrayList<String>(0));
assertEquals(Arrays.asList(alarmDef_123, alarmDef_234), repo.find("bob", null, null, null));
assertEquals(Arrays.asList(alarmDef_123, alarmDef_234), repo.find("bob", null, null, null, 1));
assertEquals(0, repo.find("bill", null, null, null).size());
assertEquals(0, repo.find("bill", null, null, null, 1).size());
}
public void shouldFindByDimension() {
final Map<String, String> dimensions = new HashMap<>();
dimensions.put("image_id", "888");
assertEquals(Arrays.asList(alarmDef_123, alarmDef_234),
repo.find("bob", null, dimensions, null));
repo.find("bob", null, dimensions, null, 1));
dimensions.clear();
dimensions.put("device", "1");
assertEquals(Arrays.asList(alarmDef_123), repo.find("bob", null, dimensions, null));
assertEquals(Arrays.asList(alarmDef_123), repo.find("bob", null, dimensions, null, 1));
dimensions.clear();
dimensions.put("Not real", "AA");
assertEquals(0, repo.find("bob", null, dimensions, null).size());
assertEquals(0, repo.find("bob", null, dimensions, null, 1).size());
}
public void shouldFindByName() {
assertEquals(Arrays.asList(alarmDef_123), repo.find("bob", "90% CPU", null, null));
assertEquals(Arrays.asList(alarmDef_123), repo.find("bob", "90% CPU", null, null, 1));
assertEquals(0, repo.find("bob", "Does not exist", null, null).size());
assertEquals(0, repo.find("bob", "Does not exist", null, null, 1).size());
}
public void shouldDeleteById() {
@ -284,6 +285,6 @@ public class AlarmDefinitionMySqlRepositoryImplTest {
fail();
} catch (EntityNotFoundException expected) {
}
assertEquals(Arrays.asList(alarmDef_234), repo.find("bob", null, null, null));
assertEquals(Arrays.asList(alarmDef_234), repo.find("bob", null, null, null, 1));
}
}

View File

@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap.Builder;
import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.alarm.Alarm;
import monasca.api.domain.model.alarm.AlarmRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.common.model.alarm.AlarmState;
import monasca.common.model.alarm.AlarmSubExpression;
import monasca.common.model.metric.MetricDefinition;
@ -71,7 +72,7 @@ public class AlarmMySqlRepositoryImplTest {
handle
.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
*/
repo = new AlarmMySqlRepoImpl(db);
repo = new AlarmMySqlRepoImpl(db, new PersistUtils());
alarmActions = new ArrayList<String>();
alarmActions.add("29387234");
@ -253,49 +254,49 @@ public class AlarmMySqlRepositoryImplTest {
@Test(groups = "database")
public void shouldFind() {
checkList(repo.find("Not a tenant id", null, null, null, null, null));
checkList(repo.find("Not a tenant id", null, null, null, null, null, 1, false));
checkList(repo.find(TENANT_ID, null, null, null, null, null), alarm1, alarm2, alarm3, compoundAlarm);
checkList(repo.find(TENANT_ID, null, null, null, null, null, 1, false), alarm1, alarm2, alarm3, compoundAlarm);
checkList(repo.find(TENANT_ID, compoundAlarm.getAlarmDefinition().getId(), null, null, null, null), compoundAlarm);
checkList(repo.find(TENANT_ID, compoundAlarm.getAlarmDefinition().getId(), null, null, null, null, 1, false), compoundAlarm);
checkList(repo.find(TENANT_ID, null, "cpu.sys_mem", null, null, null), compoundAlarm);
checkList(repo.find(TENANT_ID, null, "cpu.sys_mem", null, null, null, 1, false), compoundAlarm);
checkList(repo.find(TENANT_ID, null, "cpu.idle_perc", null, null, null), alarm1, alarm2, alarm3, compoundAlarm);
checkList(repo.find(TENANT_ID, null, "cpu.idle_perc", null, null, null, 1, false), alarm1, alarm2, alarm3, compoundAlarm);
checkList(
repo.find(TENANT_ID, null, "cpu.idle_perc",
ImmutableMap.<String, String>builder().put("flavor_id", "222").build(), null, null), alarm1,
ImmutableMap.<String, String>builder().put("flavor_id", "222").build(), null, null, 1, false), alarm1,
alarm3);
checkList(
repo.find(TENANT_ID, null, "cpu.idle_perc",
ImmutableMap.<String, String>builder().put("service", "monitoring")
.put("hostname", "roland").build(), null, null), compoundAlarm);
.put("hostname", "roland").build(), null, null, 1, false), compoundAlarm);
checkList(repo.find(TENANT_ID, null, null, null, AlarmState.UNDETERMINED, null), alarm2,
checkList(repo.find(TENANT_ID, null, null, null, AlarmState.UNDETERMINED, null, 1, false), alarm2,
compoundAlarm);
checkList(
repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", ImmutableMap
.<String, String>builder().put("service", "monitoring").build(), null, null), alarm1, alarm2);
.<String, String>builder().put("service", "monitoring").build(), null, null, 1, false), alarm1, alarm2);
checkList(
repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", null, null, null),
repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", null, null, null, 1, false),
alarm1, alarm2, alarm3);
checkList(repo.find(TENANT_ID, compoundAlarm.getAlarmDefinition().getId(), null, null,
AlarmState.UNDETERMINED, null), compoundAlarm);
AlarmState.UNDETERMINED, null, 1, false), compoundAlarm);
checkList(repo.find(TENANT_ID, null, "cpu.sys_mem", null, AlarmState.UNDETERMINED, null),
checkList(repo.find(TENANT_ID, null, "cpu.sys_mem", null, AlarmState.UNDETERMINED, null, 1, false),
compoundAlarm);
checkList(repo.find(TENANT_ID, null, "cpu.idle_perc", ImmutableMap.<String, String>builder()
.put("service", "monitoring").build(), AlarmState.UNDETERMINED, null), alarm2, compoundAlarm);
.put("service", "monitoring").build(), AlarmState.UNDETERMINED, null, 1,false), alarm2, compoundAlarm);
checkList(repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc",
ImmutableMap.<String, String>builder().put("service", "monitoring").build(),
AlarmState.UNDETERMINED, null), alarm2);
AlarmState.UNDETERMINED, null, 1, false), alarm2);
}
@Test(groups = "database")

View File

@ -34,6 +34,7 @@ import com.google.common.io.Resources;
import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.notificationmethod.NotificationMethod;
import monasca.api.domain.model.notificationmethod.NotificationMethodType;
import monasca.api.infrastructure.persistence.PersistUtils;
@Test
public class NotificationMethodMySqlRepositoryImplTest {
@ -47,7 +48,7 @@ public class NotificationMethodMySqlRepositoryImplTest {
handle = db.open();
handle.execute(Resources.toString(getClass().getResource("notification_method.sql"),
Charset.defaultCharset()));
repo = new NotificationMethodMySqlRepoImpl(db);
repo = new NotificationMethodMySqlRepoImpl(db, new PersistUtils());
}
@AfterClass
@ -84,7 +85,7 @@ public class NotificationMethodMySqlRepositoryImplTest {
}
public void shouldFind() {
List<NotificationMethod> nms = repo.find("444", null);
List<NotificationMethod> nms = repo.find("444", null, 1);
assertEquals(nms, Arrays.asList(new NotificationMethod("123", "MyEmail",
NotificationMethodType.EMAIL, "a@b")));

View File

@ -66,6 +66,6 @@ public class AlarmStateHistoryVerticaRepositoryImplTest {
@Test
public void shouldCreateAndFind() throws Exception {
create("bob", "123", AlarmState.UNDETERMINED, AlarmState.ALARM, "foo", "bar", new DateTime());
assertEquals(repo.findById("bob", "123", null).size(), 1);
assertEquals(repo.findById("bob", "123", null, 1).size(), 1);
}
}

View File

@ -87,7 +87,8 @@ public class MeasurementVerticaRepositoryImplTest {
public void shouldFindWithoutDimensions() throws Exception {
Collection<Measurements> measurements =
repo.find("bob", "cpu_utilization", null, new DateTime(2014, 1, 1, 0, 0, 0), null, null);
repo.find("bob", "cpu_utilization", null, new DateTime(2014, 1, 1, 0, 0, 0), null, null, 1,
false);
assertEquals(measurements.size(), 3);
}
@ -97,11 +98,13 @@ public class MeasurementVerticaRepositoryImplTest {
dims.put("instance_id", "123");
Collection<Measurements> measurements =
repo.find("bob", "cpu_utilization", dims, new DateTime(2014, 1, 1, 0, 0), null, null);
repo.find("bob", "cpu_utilization", dims, new DateTime(2014, 1, 1, 0, 0), null, null, 1,
false);
assertEquals(measurements.size(), 2);
dims.put("flavor_id", "2");
measurements = repo.find("bob", "cpu_utilization", dims, new DateTime(2014, 1, 1, 0, 0), null, null);
measurements = repo.find("bob", "cpu_utilization", dims, new DateTime(2014, 1, 1, 0, 0), null, null, 1,
false);
assertEquals(measurements.size(), 1);
}
}

View File

@ -85,7 +85,7 @@ public class MetricDefinitionVerticaRepositoryImplTest {
}
public void shouldFindWithoutDimensions() throws Exception {
List<MetricDefinition> defs = repo.find("bob", "cpu_utilization", null, null);
List<MetricDefinition> defs = repo.find("bob", "cpu_utilization", null, null, 1);
assertEquals(defs.size(), 3);
}
@ -94,11 +94,11 @@ public class MetricDefinitionVerticaRepositoryImplTest {
dims.put("service", "compute");
dims.put("instance_id", "123");
List<MetricDefinition> defs = repo.find("bob", "cpu_utilization", dims, null);
List<MetricDefinition> defs = repo.find("bob", "cpu_utilization", dims, null, 1);
assertEquals(defs.size(), 2);
dims.put("flavor_id", "2");
defs = repo.find("bob", "cpu_utilization", dims, null);
defs = repo.find("bob", "cpu_utilization", dims, null, 1);
assertEquals(defs.size(), 1);
}
}

View File

@ -50,6 +50,7 @@ import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.alarmdefinition.AlarmDefinition;
import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo;
import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.infrastructure.persistence.mysql.AlarmDefinitionMySqlRepoImpl;
import monasca.api.infrastructure.persistence.mysql.AlarmMySqlRepoImpl;
import monasca.api.infrastructure.persistence.mysql.NotificationMethodMySqlRepoImpl;
@ -83,11 +84,11 @@ public class AlarmIntegrationTest extends AbstractMonApiResourceTest {
.execute("insert into notification_method (id, tenant_id, name, type, address, created_at, updated_at) values ('77778687', 'alarm-test', 'MyEmail', 'EMAIL', 'a@b', NOW(), NOW())");
mysqlDb.close(handle);
repo = new AlarmDefinitionMySqlRepoImpl(mysqlDb);
repo = new AlarmDefinitionMySqlRepoImpl(mysqlDb, new PersistUtils());
service =
new AlarmDefinitionService(config, producer, repo, new AlarmMySqlRepoImpl(mysqlDb),
new NotificationMethodMySqlRepoImpl(mysqlDb));
addResources(new AlarmDefinitionResource(service, repo));
new AlarmDefinitionService(config, producer, repo, new AlarmMySqlRepoImpl(mysqlDb, new PersistUtils()),
new NotificationMethodMySqlRepoImpl(mysqlDb, new PersistUtils()));
addResources(new AlarmDefinitionResource(service, repo, new PersistUtils()));
}
@BeforeTest

View File

@ -39,6 +39,7 @@ import monasca.api.MonApiModule;
import monasca.api.app.MetricService;
import monasca.api.app.command.CreateMetricCommand;
import monasca.api.domain.model.metric.MetricDefinitionRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.resource.AbstractMonApiResourceTest;
import monasca.api.resource.MetricResource;
import com.sun.jersey.api.client.ClientResponse;
@ -62,7 +63,7 @@ public class MetricIntegrationTest extends AbstractMonApiResourceTest {
db.close(handle);
metricRepo = mock(MetricDefinitionRepo.class);
service = new MetricService(config, producer, metricRegistry);
addResources(new MetricResource(service, metricRepo));
addResources(new MetricResource(service, metricRepo, new PersistUtils()));
}
@BeforeTest

View File

@ -38,6 +38,7 @@ import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.notificationmethod.NotificationMethod;
import monasca.api.domain.model.notificationmethod.NotificationMethodRepo;
import monasca.api.domain.model.notificationmethod.NotificationMethodType;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.infrastructure.persistence.mysql.NotificationMethodMySqlRepoImpl;
import monasca.api.resource.AbstractMonApiResourceTest;
import monasca.api.resource.NotificationMethodResource;
@ -59,8 +60,8 @@ public class NotificationMethodIntegrationTest extends AbstractMonApiResourceTes
.execute("insert into notification_method (id, tenant_id, name, type, address, created_at, updated_at) values ('29387234', 'notification-method-test', 'MyEmaila', 'EMAIL', 'a@b', NOW(), NOW())");
db.close(handle);
repo = new NotificationMethodMySqlRepoImpl(db);
addResources(new NotificationMethodResource(repo));
repo = new NotificationMethodMySqlRepoImpl(db, new PersistUtils());
addResources(new NotificationMethodResource(repo, new PersistUtils()));
}
@BeforeTest

View File

@ -15,6 +15,7 @@
package monasca.api.resource;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyMap;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
@ -30,6 +31,7 @@ import static org.testng.Assert.fail;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@ -41,6 +43,7 @@ import monasca.api.app.AlarmDefinitionService;
import monasca.api.app.command.CreateAlarmDefinitionCommand;
import monasca.api.app.command.UpdateAlarmDefinitionCommand;
import monasca.api.domain.model.common.Paged;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.common.model.alarm.AlarmExpression;
import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.alarmdefinition.AlarmDefinition;
@ -84,10 +87,10 @@ public class AlarmDefinitionResourceTest extends AbstractMonApiResourceTest {
repo = mock(AlarmDefinitionRepo.class);
when(repo.findById(eq("abc"), eq("123"))).thenReturn(alarm);
when(repo.find(anyString(), anyString(), (Map<String, String>) anyMap(), anyString())).thenReturn(
when(repo.find(anyString(), anyString(), (Map<String, String>) anyMap(), anyString(), anyInt())).thenReturn(
Arrays.asList(alarmItem));
addResources(new AlarmDefinitionResource(service, repo));
addResources(new AlarmDefinitionResource(service, repo, new PersistUtils()));
}
@SuppressWarnings("unchecked")
@ -269,37 +272,35 @@ public class AlarmDefinitionResourceTest extends AbstractMonApiResourceTest {
@SuppressWarnings("unchecked")
public void shouldList() {
Map
lhm =
(Map) client().resource("/v2.0/alarm-definitions").header("X-Tenant-Id", "abc")
.get(Paged.class).elements.get(0);
AlarmDefinition
ad =
new AlarmDefinition((String) lhm.get("id"), (String) lhm.get("name"),
(String) lhm.get("description"), (String) lhm.get("severity"),
(String) lhm.get("expression"), (List<String>) lhm.get("match_by"),
(boolean) lhm.get("actions_enabled"),
(List<String>) lhm.get("alarm_actions"),
(List<String>) lhm.get("ok_actions"),
(List<String>) lhm.get("undetermined_actions"));
Map lhm = (Map) client().resource("/v2.0/alarm-definitions").header("X-Tenant-Id", "abc")
.get(Paged.class).elements.get(0);
AlarmDefinition ad = new AlarmDefinition((String) lhm.get("id"), (String) lhm.get("name"),
(String) lhm.get("description"),
(String) lhm.get("severity"),
(String) lhm.get("expression"),
(List<String>) lhm.get("match_by"),
(boolean) lhm.get("actions_enabled"),
(List<String>) lhm.get("alarm_actions"),
(List<String>) lhm.get("ok_actions"),
(List<String>) lhm.get("undetermined_actions"));
List<Map<String, String>> links = (List<Map<String, String>>) lhm.get("links");
List<Link>
linksList =
Arrays.asList(new Link(links.get(0).get("rel"), links.get(0).get("href")));
List<Link> linksList = Arrays.asList(new Link(links.get(0).get("rel"), links.get(0).get("href")));
ad.setLinks(linksList);
List<AlarmDefinition> alarms = Arrays.asList(ad);
assertEquals(alarms, Arrays.asList(alarmItem));
verify(repo).find(eq("abc"), anyString(), (Map<String, String>) anyMap(), anyString());
verify(repo).find(eq("abc"), anyString(), (Map<String, String>) anyMap(), anyString(), anyInt());
}
@SuppressWarnings("unchecked")
public void shouldListByName() throws Exception {
Map
lhm =
(Map) client()
@ -326,7 +327,8 @@ public class AlarmDefinitionResourceTest extends AbstractMonApiResourceTest {
List<AlarmDefinition> alarms = Arrays.asList(ad);
assertEquals(alarms, Arrays.asList(alarmItem));
verify(repo).find(eq("abc"), eq("foo bar baz"), (Map<String, String>) anyMap(), anyString());
verify(repo).find(eq("abc"), eq("foo bar baz"), (Map<String, String>) anyMap(), anyString(),
anyInt());
}
public void shouldGet() {
@ -370,8 +372,8 @@ public class AlarmDefinitionResourceTest extends AbstractMonApiResourceTest {
@SuppressWarnings("unchecked")
public void should500OnInternalException() {
doThrow(new RuntimeException("")).when(repo).find(anyString(), anyString(),
(Map<String, String>) anyObject(),
anyString());
(Map<String, String>) anyObject(), anyString(), anyInt());
try {
client().resource("/v2.0/alarm-definitions").header("X-Tenant-Id", "abc").get(List.class);
@ -382,8 +384,7 @@ public class AlarmDefinitionResourceTest extends AbstractMonApiResourceTest {
}
public void shouldHydateLinksOnList() {
List<Link> expected =
Arrays.asList(new Link("self", "/v2.0/alarm-definitions/123"));
List<Link> expected = Arrays.asList(new Link("self", "/v2.0/alarm-definitions/123"));
Map
lhm =

View File

@ -35,6 +35,7 @@ import org.testng.annotations.Test;
import monasca.api.app.MetricService;
import monasca.api.app.command.CreateMetricCommand;
import monasca.api.domain.model.metric.MetricDefinitionRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.resource.exception.ErrorMessages;
import com.sun.jersey.api.client.ClientResponse;
@ -62,7 +63,7 @@ public class MetricResourceTest extends AbstractMonApiResourceTest {
doNothing().when(service).create(any(List.class), anyString(), anyString());
metricRepo = mock(MetricDefinitionRepo.class);
addResources(new MetricResource(service, metricRepo));
addResources(new MetricResource(service, metricRepo, new PersistUtils()));
}
@SuppressWarnings("unchecked")

View File

@ -23,6 +23,7 @@ import monasca.api.domain.model.common.Paged;
import monasca.api.domain.model.notificationmethod.NotificationMethod;
import monasca.api.domain.model.notificationmethod.NotificationMethodRepo;
import monasca.api.domain.model.notificationmethod.NotificationMethodType;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.resource.exception.ErrorMessages;
import org.mockito.internal.matchers.Not;
@ -62,9 +63,9 @@ public class NotificationMethodResourceTest extends AbstractMonApiResourceTest {
when(repo.create(eq("abc"), eq("MyPd"), eq(NotificationMethodType.PAGERDUTY), anyString()))
.thenReturn(notificationMethodPagerduty);
when(repo.findById(eq("abc"), eq("123"))).thenReturn(notificationMethod);
when(repo.find(eq("abc"), anyString())).thenReturn(Arrays.asList(notificationMethod));
when(repo.find(eq("abc"), anyString(), anyInt())).thenReturn(Arrays.asList(notificationMethod));
addResources(new NotificationMethodResource(repo));
addResources(new NotificationMethodResource(repo, new PersistUtils()));
}
public void shouldCreate() {
@ -192,6 +193,7 @@ public class NotificationMethodResourceTest extends AbstractMonApiResourceTest {
public void shouldList() {
Map
lhm =
(Map) client().resource("/v2.0/notification-methods").header("X-Tenant-Id", "abc")
@ -203,18 +205,9 @@ public class NotificationMethodResourceTest extends AbstractMonApiResourceTest {
NotificationMethodType.fromJson((String) lhm.get("type")),
(String) lhm.get("address"));
List<Map<String, String>> links = (List<Map<String, String>>) lhm.get("links");
List<Link>
linksList =
Arrays.asList(new Link(links.get(0).get("rel"), links.get(0).get("href")));
nm.setLinks(linksList);
List<NotificationMethod> notificationMethods = Arrays.asList(nm);
assertEquals(notificationMethods, Arrays.asList(notificationMethod));
verify(repo).find(eq("abc"), anyString());
verify(repo).find(eq("abc"), anyString(), anyInt());
}
public void shouldGet() {
@ -256,7 +249,7 @@ public class NotificationMethodResourceTest extends AbstractMonApiResourceTest {
}
public void should500OnInternalException() {
doThrow(new RuntimeException("")).when(repo).find(anyString(), anyString());
doThrow(new RuntimeException("")).when(repo).find(anyString(), anyString(), anyInt());
try {
client().resource("/v2.0/notification-methods").header("X-Tenant-Id", "abc")

View File

@ -15,6 +15,7 @@
package monasca.api.resource;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
@ -28,6 +29,8 @@ import org.joda.time.DateTime;
import org.testng.annotations.Test;
import monasca.api.domain.model.statistic.StatisticRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import com.sun.jersey.api.client.ClientResponse;
@Test
@ -40,17 +43,19 @@ public class StatisticResourceTest extends AbstractMonApiResourceTest {
super.setupResources();
statisticRepo = mock(StatisticRepo.class);
addResources(new StatisticResource(statisticRepo));
addResources(new StatisticResource(statisticRepo, new PersistUtils()));
}
@SuppressWarnings("unchecked")
public void shouldQueryWithDefaultParams() throws Exception {
client()
.resource(
"/v2.0/metrics/statistics?name=cpu_utilization&start_time=2013-11-20T18:43Z&dimensions=service:hpcs.compute,%20instance_id:123&statistics=avg,%20min,%20max&period=60")
.header("X-Tenant-Id", "abc").get(ClientResponse.class);
verify(statisticRepo).find(anyString(), anyString(), any(Map.class), any(DateTime.class),
any(DateTime.class), any(List.class), anyInt());
any(DateTime.class), any(List.class), anyInt(), any(String.class), anyInt(),
anyBoolean());
}
public void queryShouldThrowOnInvalidDateFormat() throws Exception {

View File

@ -37,6 +37,8 @@ import monasca.api.domain.model.common.Paged;
import monasca.api.domain.model.version.Version;
import monasca.api.domain.model.version.Version.VersionStatus;
import monasca.api.domain.model.version.VersionRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import com.sun.jersey.api.client.GenericType;
@Test
@ -54,7 +56,7 @@ public class VersionResourceTest extends AbstractMonApiResourceTest {
repo = mock(VersionRepo.class);
when(repo.findById(eq("v2.0"))).thenReturn(version);
when(repo.find()).thenReturn(Arrays.asList(version));
addResources(new VersionResource(repo));
addResources(new VersionResource(repo, new PersistUtils()));
}
public void shouldList() {