[DEFAULTS]

[repositories]
offsets = monasca_transform.mysql_offset_specs:MySQLOffsetSpecs
data_driven_specs = monasca_transform.data_driven_specs.mysql_data_driven_specs_repo:MySQLDataDrivenSpecsRepo
offsets_max_revisions = 10

[database]
server_type = mysql:thin
host = localhost
database_name = monasca_transform
username = m-transform
password = password

[messaging]
adapter = monasca_transform.messaging.adapter:KafkaMessageAdapter
topic = metrics
brokers = localhost:9092
publish_kafka_project_id = d2cb21079930415a9f2a33588b9f2bb6
adapter_pre_hourly = monasca_transform.messaging.adapter:KafkaMessageAdapterPreHourly
topic_pre_hourly = metrics_pre_hourly

[stage_processors]
enable_pre_hourly_processor = True

[pre_hourly_processor]
enable_instance_usage_df_cache = True
instance_usage_df_cache_storage_level = MEMORY_ONLY_SER_2
enable_batch_time_filtering = True
effective_batch_revision = 2

#
# Configurable values for the monasca-transform service
#
[service]
# The address of the mechanism being used for election coordination
coordinator_address = kazoo://localhost:2181

# The name of the coordination/election group
coordinator_group = monasca-transform

# How long the candidate should sleep between election result
# queries (in seconds)
election_polling_frequency = 15

# Whether debug-level log entries should be included in the application
# log. If this setting is false, info-level will be used for logging.
enable_debug_log_entries = true

# The path of the setup file to be executed
setup_file = /opt/stack/monasca-transform/setup.py

# The target of the setup file
setup_target = bdist_egg

# The path of the monasca-transform Spark driver
spark_driver = /opt/stack/monasca-transform/monasca_transform/driver/mon_metrics_kafka.py

# The directory for the transform-service log
service_log_path = /var/log/monasca/transform/

# The filename for the transform-service log
service_log_filename = monasca-transform.log

# Whether Spark event logging should be enabled (true/false)
spark_event_logging_enabled = true

# A comma-separated list of jars which Spark should use
spark_jars_list = /opt/spark/current/assembly/target/scala-2.10/jars/spark-streaming-kafka-0-8_2.10-2.1.1.jar,/opt/spark/current/assembly/target/scala-2.10/jars/scala-library-2.10.6.jar,/opt/spark/current/assembly/target/scala-2.10/jars/kafka_2.10-0.8.1.1.jar,/opt/spark/current/assembly/target/scala-2.10/jars/metrics-core-2.2.0.jar,/opt/spark/current/assembly/target/scala-2.10/jars/drizzle-jdbc-1.3.jar

# A comma-separated list of the Spark master URL(s)
spark_master_list = spark://localhost:7077

# spark_home for the environment
spark_home = /opt/spark/current

# Python files for Spark to use
spark_python_files = /opt/stack/monasca-transform/dist/monasca_transform-0.0.1.egg

# How often the stream should be read (in seconds)
stream_interval = 600

# The working directory for monasca-transform
work_dir = /opt/stack/monasca-transform

# Whether the record store DataFrame should be cached, and at which
# Spark storage level
enable_record_store_df_cache = True
record_store_df_cache_storage_level = MEMORY_ONLY_SER_2
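Several options above (offsets, data_driven_specs, adapter, adapter_pre_hourly) use a "package.module:ClassName" value to select a pluggable implementation. As a minimal illustration of how such a value can be resolved into a class at runtime, the sketch below uses importlib; load_plugin_class is a hypothetical helper written for this example, not a function of monasca-transform itself.

import importlib


def load_plugin_class(spec):
    """Resolve a 'package.module:ClassName' config value to a class."""
    module_path, _, class_name = spec.partition(':')
    module = importlib.import_module(module_path)
    return getattr(module, class_name)


# Runnable stand-in using a stdlib class; in a deployment where
# monasca-transform is installed, the same call could resolve e.g.
# 'monasca_transform.messaging.adapter:KafkaMessageAdapter'.
ordered_dict_cls = load_plugin_class('collections:OrderedDict')
print(ordered_dict_cls)  # <class 'collections.OrderedDict'>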