monasca-transform/devstack/files/monasca-transform/monasca-transform.conf

[DEFAULT]
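#
# Repository implementations: where processed Kafka offsets and the
# data driven specs (transform and pre-transform specifications)
# are stored
#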
[repositories]
offsets = monasca_transform.mysql_offset_specs:MySQLOffsetSpecs
data_driven_specs = monasca_transform.data_driven_specs.mysql_data_driven_specs_repo:MySQLDataDrivenSpecsRepo
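# The maximum number of offset revisions to retain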
offsets_max_revisions = 10
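#
# Connection settings for the database backing the repositories above
#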
[database]
server_type = mysql:thin
host = localhost
database_name = monasca_transform
username = m-transform
password = password
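#
# Kafka messaging settings for publishing metrics
#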
[messaging]
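# The adapter implementation used to publish aggregated metrics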
adapter = monasca_transform.messaging.adapter:KafkaMessageAdapter
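# The Kafka topic to which aggregated metrics are published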
topic = metrics
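# A comma separated list of Kafka brokers (host:port)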
brokers = 192.168.15.6:9092
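# The region and project id under which metrics are published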
publish_region = useast
publish_kafka_project_id = d2cb21079930415a9f2a33588b9f2bb6
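# The adapter and topic used to publish intermediate metrics for
# consumption by the pre-hourly processor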
adapter_pre_hourly = monasca_transform.messaging.adapter:KafkaMessageAdapterPreHourly
topic_pre_hourly = metrics_pre_hourly
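#
# Toggles for individual processing stages
#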
[stage_processors]
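# Whether the pre-hourly processor should be run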
pre_hourly_processor_enabled = true
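#
# Settings for the pre-hourly processor, which aggregates the
# intermediate metrics published to the pre-hourly topic
#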
[pre_hourly_processor]
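# Slack time to allow for late arriving metrics (in seconds)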
late_metric_slack_time = 600
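# Whether the instance usage data frame should be cached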
enable_instance_usage_df_cache = true
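# The Spark storage level for the instance usage data frame cache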
instance_usage_df_cache_storage_level = MEMORY_ONLY_SER_2
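# Whether filtering of metrics by batch time should be enabled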
enable_batch_time_filtering = true
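# The data provider implementation for the pre-hourly processor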
data_provider = monasca_transform.processor.pre_hourly_processor:PreHourlyProcessorDataProvider
effective_batch_revision = 2
#
# Configurable values for the monasca-transform service
#
[service]
# The address of the mechanism being used for election coordination
coordinator_address = kazoo://localhost:2181
# The name of the coordination/election group
coordinator_group = monasca-transform
# How long the candidate should sleep between election result
# queries (in seconds)
election_polling_frequency = 15
# Whether debug-level log entries should be included in the application
# log. If this setting is false, info-level will be used for logging.
enable_debug_log_entries = true
# The path for the monasca-transform Spark driver
spark_driver = /opt/monasca/transform/lib/driver.py
# The location for the monasca-transform service log
service_log_path = /var/log/monasca/transform/
# The filename for the monasca-transform service log
service_log_filename = monasca-transform.log
# Whether Spark event logging should be enabled (true/false)
spark_event_logging_enabled = true
# A list of jars which Spark should use
spark_jars_list = /opt/spark/current/assembly/target/scala-2.10/jars/spark-streaming-kafka-0-8_2.10-2.2.0.jar,/opt/spark/current/assembly/target/scala-2.10/jars/scala-library-2.10.6.jar,/opt/spark/current/assembly/target/scala-2.10/jars/kafka_2.10-0.8.1.1.jar,/opt/spark/current/assembly/target/scala-2.10/jars/metrics-core-2.2.0.jar,/opt/spark/current/assembly/target/scala-2.10/jars/drizzle-jdbc-1.3.jar
# A list of Spark master URLs to connect to
spark_master_list = spark://localhost:7077
# spark_home for the environment
spark_home = /opt/spark/current
# Python files for Spark to use
spark_python_files = /opt/monasca/transform/lib/monasca-transform.zip
# How often the stream should be read (in seconds)
stream_interval = 600
# The working directory for monasca-transform
work_dir = /var/run/monasca/transform
# Whether the record store data frame should be cached
enable_record_store_df_cache = true
# The Spark storage level for the record store data frame cache
record_store_df_cache_storage_level = MEMORY_ONLY_SER_2