Change Spark version to 1.6.3
We have seen several instances where Spark 1.6.1, over time, continues to consume more and more resources. The change Ibf244cbfc00a90ada66f492b473719c25fa17fd2 alone was not enough to curb this growth, but the new version of Spark has shown better behavior. Related changes will also need to be made in any installer, such as Ansible. Change-Id: Ib6b1220cf0186def115846c8cf71684bb2d6e8c7
This commit is contained in:
parent
b852f7141f
commit
a64f1247a8
|
@ -62,7 +62,7 @@ service_log_filename=monasca-transform.log
|
|||
spark_event_logging_enabled = true
|
||||
|
||||
# A list of jars which Spark should use
|
||||
spark_jars_list = /opt/spark/current/lib/spark-streaming-kafka_2.10-1.6.1.jar,/opt/spark/current/lib/scala-library-2.10.1.jar,/opt/spark/current/lib/kafka_2.10-0.8.1.1.jar,/opt/spark/current/lib/metrics-core-2.2.0.jar,/opt/spark/current/lib/drizzle-jdbc-1.3.jar
|
||||
spark_jars_list = /opt/spark/current/lib/spark-streaming-kafka_2.10-1.6.3.jar,/opt/spark/current/lib/scala-library-2.10.1.jar,/opt/spark/current/lib/kafka_2.10-0.8.1.1.jar,/opt/spark/current/lib/metrics-core-2.2.0.jar,/opt/spark/current/lib/drizzle-jdbc-1.3.jar
|
||||
|
||||
# A list of where the Spark master(s) should run
|
||||
spark_master_list = spark://localhost:7077
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
. /opt/spark/current/conf/spark-env.sh
|
||||
export EXEC_CLASS=org.apache.spark.deploy.master.Master
|
||||
export INSTANCE_ID=1
|
||||
export SPARK_CLASSPATH=/opt/spark/current/conf/:/opt/spark/current/lib/spark-assembly-1.6.1-hadoop2.6.0.jar:/opt/spark/current/lib/datanucleus-core-3.2.10.jar:/opt/spark/current/lib/datanucleus-rdbms-3.2.9.jar:/opt/spark/current/lib/datanucleus-api-jdo-3.2.6.jar
|
||||
export SPARK_CLASSPATH=/opt/spark/current/conf/:/opt/spark/current/lib/spark-assembly-1.6.3-hadoop2.6.0.jar:/opt/spark/current/lib/datanucleus-core-3.2.10.jar:/opt/spark/current/lib/datanucleus-rdbms-3.2.9.jar:/opt/spark/current/lib/datanucleus-api-jdo-3.2.6.jar
|
||||
export log="$SPARK_LOG_DIR/spark-spark-"$EXEC_CLASS"-"$INSTANCE_ID"-127.0.0.1.out"
|
||||
export SPARK_HOME=/opt/spark/current
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
. /opt/spark/current/conf/spark-env.sh
|
||||
export EXEC_CLASS=org.apache.spark.deploy.worker.Worker
|
||||
export INSTANCE_ID=1
|
||||
export SPARK_CLASSPATH=/opt/spark/current/conf/:/opt/spark/current/lib/spark-assembly-1.6.1-hadoop2.6.0.jar:/opt/spark/current/lib/datanucleus-core-3.2.10.jar:/opt/spark/current/lib/datanucleus-rdbms-3.2.9.jar:/opt/spark/current/lib/datanucleus-api-jdo-3.2.6.jar
|
||||
export SPARK_CLASSPATH=/opt/spark/current/conf/:/opt/spark/current/lib/spark-assembly-1.6.3-hadoop2.6.0.jar:/opt/spark/current/lib/datanucleus-core-3.2.10.jar:/opt/spark/current/lib/datanucleus-rdbms-3.2.9.jar:/opt/spark/current/lib/datanucleus-api-jdo-3.2.6.jar
|
||||
export log="$SPARK_LOG_DIR/spark-spark-"$EXEC_CLASS"-"$INSTANCE_ID"-127.0.0.1.out"
|
||||
export SPARK_HOME=/opt/spark/current
|
||||
|
||||
|
|
|
@ -79,7 +79,7 @@ function install_java_libs {
|
|||
function link_spark_streaming_lib {
|
||||
|
||||
pushd /opt/spark/current/lib
|
||||
ln -sf spark-streaming-kafka.jar spark-streaming-kafka_2.10-1.6.1.jar
|
||||
ln -sf spark-streaming-kafka.jar spark-streaming-kafka_2.10-1.6.3.jar
|
||||
popd
|
||||
|
||||
}
|
||||
|
|
|
@ -37,7 +37,7 @@ enable_service spark-worker
|
|||
|
||||
# spark vars
|
||||
SPARK_DIRECTORIES=("/var/spark" "/var/log/spark" "/var/log/spark/events" "/var/run/spark" "/var/run/spark/work" "/etc/spark/conf" "/etc/spark/init" )
|
||||
SPARK_VERSION=${SPARK_VERSION:-1.6.1}
|
||||
SPARK_VERSION=${SPARK_VERSION:-1.6.3}
|
||||
HADOOP_VERSION=${HADOOP_VERSION:-2.6}
|
||||
SPARK_HADOOP_VERSION=spark-$SPARK_VERSION-bin-hadoop$HADOOP_VERSION
|
||||
SPARK_TARBALL_NAME=${SPARK_HADOOP_VERSION}.tgz
|
||||
|
|
Loading…
Reference in New Issue