Removed ps-streaming dependencies in favor of mon-streaming

Change-Id: I87664e63af549f44b8422bf2c7446b2d88b70d9d
Jonathan Halterman 2014-03-10 13:29:58 -07:00
parent 616ca788ad
commit f2a4c18099
5 changed files with 14 additions and 16 deletions

pom.xml (18 changed lines)

@@ -15,8 +15,6 @@
   <properties>
     <versionNumber>1.0.0</versionNumber>
     <computedVersion>${versionNumber}-SNAPSHOT</computedVersion>
-    <!-- TODO REMOVE -->
-    <ps.common.version>1.0.0.295</ps.common.version>
     <mon.common.version>1.0.0-SNAPSHOT</mon.common.version>
     <dropwizard.version>0.7.0-rc1</dropwizard.version>
@ -98,12 +96,6 @@
</exclusion>
</exclusions>
</dependency>
<!-- TODO REMOVE -->
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>ps-streaming</artifactId>
<version>${ps.common.version}</version>
</dependency>
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-model</artifactId>
@@ -114,6 +106,11 @@
     <artifactId>mon-persistence</artifactId>
     <version>${mon.common.version}</version>
   </dependency>
+  <dependency>
+    <groupId>com.hpcloud</groupId>
+    <artifactId>mon-streaming</artifactId>
+    <version>${mon.common.version}</version>
+  </dependency>
   <dependency>
     <groupId>io.dropwizard</groupId>
     <artifactId>dropwizard-db</artifactId>
@@ -137,11 +134,10 @@
     <version>${mon.common.version}</version>
     <scope>test</scope>
   </dependency>
-  <!-- TODO REMOVE OR MIGRATE ME -->
   <dependency>
     <groupId>com.hpcloud</groupId>
-    <artifactId>ps-streaming</artifactId>
-    <version>${ps.common.version}</version>
+    <artifactId>mon-streaming</artifactId>
+    <version>${mon.common.version}</version>
     <type>test-jar</type>
     <scope>test</scope>
   </dependency>
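
Net effect of the pom.xml hunks: the ps-streaming dependency and its ps.common.version property are gone, and mon-streaming is pulled in at ${mon.common.version} for both compile and test scope. A sketch of how the streaming dependencies should read once this change is applied, assuming the surrounding <dependencies> section is otherwise unchanged:

    <!-- compile-scope streaming support, replacing ps-streaming -->
    <dependency>
      <groupId>com.hpcloud</groupId>
      <artifactId>mon-streaming</artifactId>
      <version>${mon.common.version}</version>
    </dependency>

    <!-- test utilities previously pulled from the ps-streaming test-jar -->
    <dependency>
      <groupId>com.hpcloud</groupId>
      <artifactId>mon-streaming</artifactId>
      <version>${mon.common.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>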

ThresholdingConfiguration.java

@@ -9,6 +9,9 @@ import javax.validation.constraints.NotNull;
 import org.hibernate.validator.constraints.NotEmpty;
+import com.hpcloud.messaging.rabbitmq.RabbitMQConfiguration;
+import com.hpcloud.streaming.storm.amqp.AMQPSpoutConfiguration;
 /**
  * Thresholding configuration.
  *

(changed file, name not shown)

@@ -5,7 +5,6 @@ import io.dropwizard.db.DataSourceFactory;
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.commons.fileupload.util.Streams;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -15,8 +14,6 @@ import backtype.storm.topology.OutputFieldsDeclarer;
 import backtype.storm.topology.base.BaseRichBolt;
 import backtype.storm.tuple.Tuple;
-import com.hpcloud.messaging.rabbitmq.RabbitMQConfiguration;
-import com.hpcloud.messaging.rabbitmq.RabbitMQService;
 import com.hpcloud.mon.ThresholdingConfiguration;
 import com.hpcloud.mon.common.event.AlarmDeletedEvent;
 import com.hpcloud.mon.common.model.alarm.AlarmState;
@@ -27,6 +24,7 @@ import com.hpcloud.mon.domain.service.AlarmDAO;
 import com.hpcloud.mon.infrastructure.messaging.MessagingModule;
 import com.hpcloud.mon.infrastructure.persistence.PersistenceModule;
 import com.hpcloud.streaming.storm.Logging;
+import com.hpcloud.streaming.storm.Streams;
 import com.hpcloud.util.Injector;
 import com.hpcloud.util.Serialization;

MetricAggregationBolt.java

@@ -8,7 +8,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import org.apache.commons.fileupload.util.Streams;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -31,6 +30,7 @@ import com.hpcloud.mon.domain.service.SubAlarmDAO;
 import com.hpcloud.mon.domain.service.SubAlarmStatsRepository;
 import com.hpcloud.mon.infrastructure.persistence.PersistenceModule;
 import com.hpcloud.streaming.storm.Logging;
+import com.hpcloud.streaming.storm.Streams;
 import com.hpcloud.streaming.storm.Tuples;
 import com.hpcloud.util.Injector;
@@ -184,6 +184,7 @@ public class MetricAggregationBolt extends BaseRichBolt {
     else {
       LOG.debug("Creating SubAlarmStats for {}", metricDefinition);
       for (SubAlarm subAlarm : subAlarms)
+        // TODO should treat metric def name previx like a namespace
        subAlarm.setSporadicMetric(sporadicMetricNamespaces.contains(metricDefinition.namespace));
       long viewEndTimestamp = (System.currentTimeMillis() / 1000) + evaluationTimeOffset;
       subAlarmStatsRepo = new SubAlarmStatsRepository(subAlarms, viewEndTimestamp);
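
The new TODO hints that the sporadic-metric check, which still keys off the ps-style metricDefinition.namespace field, could instead key off the leading segment of the metric definition's name. A hypothetical sketch of that idea, not part of this commit (the name field and the dot-delimited prefix convention are assumptions):

    // Hypothetical: treat the text before the first '.' of the metric name
    // as a namespace-like prefix and use it for the sporadic lookup.
    String name = metricDefinition.name;  // assumed field, e.g. "compute.cpu_time"
    int dot = name.indexOf('.');
    String prefix = dot > 0 ? name.substring(0, dot) : name;
    subAlarm.setSporadicMetric(sporadicMetricNamespaces.contains(prefix));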

(changed file, name not shown)

@@ -5,7 +5,6 @@ import io.dropwizard.db.DataSourceFactory;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.fileupload.util.Streams;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -24,6 +23,7 @@ import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
 import com.hpcloud.mon.domain.service.SubAlarmDAO;
 import com.hpcloud.mon.infrastructure.persistence.PersistenceModule;
 import com.hpcloud.streaming.storm.Logging;
+import com.hpcloud.streaming.storm.Streams;
 import com.hpcloud.util.Injector;
 /**