Merge "Merge Vanilla and Spark plugins"

commit 87d7000744
Author: Jenkins
Committed by: Gerrit Code Review
Date: 2016-09-07 13:48:27 +00:00

5 changed files with 55 additions and 38 deletions

View File

@@ -477,11 +477,13 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "vanilla" ]; then
 export OOZIE_HADOOP_V2_7_1_DOWNLOAD_URL=${OOZIE_HADOOP_V2_7_1_FILE:-"http://sahara-files.mirantis.com/oozie-4.2.0-hadoop-2.7.1.tar.gz"}
 export DIB_HDFS_LIB_DIR="/opt/hadoop/share/hadoop/tools/lib"
 export plugin_type="vanilla"
+export DIB_SPARK_VERSION=1.6.0
+export SPARK_HADOOP_DL=hadoop2.6
-ubuntu_elements_sequence="hadoop oozie mysql hive $JAVA_ELEMENT swift_hadoop"
-fedora_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop"
-centos_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop"
-centos7_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop"
+ubuntu_elements_sequence="hadoop oozie mysql hive $JAVA_ELEMENT swift_hadoop spark"
+fedora_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop spark"
+centos_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop spark"
+centos7_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop spark"
 # Workaround for https://bugs.launchpad.net/diskimage-builder/+bug/1204824
 # https://bugs.launchpad.net/sahara/+bug/1252684
@@ -538,6 +540,8 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "vanilla" ]; then
 unset plugin_type
 unset DIB_HDFS_LIB_DIR
+unset DIB_SPARK_VERSION
+unset SPARK_HADOOP_DL
 fi
 ##########################
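
With the defaults exported above (DIB_SPARK_VERSION=1.6.0, SPARK_HADOOP_DL=hadoop2.6), the spark element's install script changed at the end of this commit resolves the Spark download as follows. This is an illustration only, substituting those values into the URL template shown in that script:

    DIB_SPARK_VERSION=1.6.0
    SPARK_HADOOP_DL=hadoop2.6
    SPARK_DOWNLOAD_URL="http://archive.apache.org/dist/spark/spark-$DIB_SPARK_VERSION/spark-$DIB_SPARK_VERSION-bin-$SPARK_HADOOP_DL.tgz"
    # -> http://archive.apache.org/dist/spark/spark-1.6.0/spark-1.6.0-bin-hadoop2.6.tgz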

View File

@@ -2,13 +2,14 @@
 spark
 =====
 
-Installs Spark on Ubuntu. Requires Hadoop (currently from CDH distribution).
+Installs Spark. Requires Hadoop.
 
-This element will install Spark into an Ubuntu image. It tries to guess the
-correct file to download based on the ``DIB_SPARK_VERSION`` and ``DIB_CDH_VERSION``
-variables, but this behaviour can be overridden by using ``SPARK_DOWNLOAD_URL``
-to specify a download URL for a pre-built Spark tar.gz file. See
-http://spark.apache.org/downloads.html for more download options.
+This element will install Spark. It tries to guess the
+correct file to download based on the ``DIB_SPARK_VERSION`` and
+``DIB_CDH_VERSION``, but this behaviour can be overridden by using
+``SPARK_DOWNLOAD_URL`` to specify a download URL for a pre-built
+Spark tar.gz file.
+See http://spark.apache.org/downloads.html for more download options.
 
 Versions
 --------
@@ -32,10 +33,11 @@ Environment Variables
 DIB_SPARK_VERSION
   :Required: Yes, if ``SPARK_DOWNLOAD_URL`` is not set.
   :Description: Version of the Spark package to download.
-  :Exmaple: ``DIB_SPARK_VERSION=1.3.1``
+  :Example: ``DIB_SPARK_VERSION=1.3.1``
 
 DIB_CDH_VERSION
-  :Required: Yes, if ``SPARK_DOWNLOAD_URL`` is not set.
+  :Required: Required only for images for Spark Plugin and
+   if ``SPARK_DOWNLOAD_URL`` is not set.
   :Description: Version of the CDH platform to use for Hadoop compatibility.
    CDH version 5.3 is known to work well.
   :Example: ``DIB_CDH_VERSION=5.3``
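
A minimal sketch of the two ways the README describes to select the Spark tarball. The values are the README's own examples; the explicit URL shown is simply what the element would assemble from those values, not an additional default:

    # Option 1: let the element guess the file from the Spark and CDH versions
    export DIB_SPARK_VERSION=1.3.1
    export DIB_CDH_VERSION=5.3

    # Option 2: point the element at an explicit pre-built tar.gz instead
    export SPARK_DOWNLOAD_URL="http://archive.apache.org/dist/spark/spark-1.3.1/spark-1.3.1-bin-hadoop2.4.tgz"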

View File

@@ -11,10 +11,17 @@ set -o pipefail
 # and we need to execute this code the first time the VM boots.
 firstboot_script_name="/opt/spark/firstboot.sh"
-sed -i -e "s,^exit 0$,[ -f $firstboot_script_name ] \&\& sh $firstboot_script_name; exit 0," /etc/rc.local
-user_and_group_names="ubuntu:ubuntu"
+if [ "$DISTRO_NAME" == "ubuntu" ]; then
+    sed -i -e "s,^exit 0$,[ -f $firstboot_script_name ] \&\& sh $firstboot_script_name; exit 0," /etc/rc.local
+    if [ "$plugin_type" == "vanilla" ]; then
+        user_and_group_names="hadoop:hadoop"
+    elif [ "$plugin_type" == "spark" ]; then
+        user_and_group_names="ubuntu:ubuntu"
+    fi
+else
+    sed -i -e "s,^exit 0$,[ -f $firstboot_script_name ] \&\& sh $firstboot_script_name; exit 0," /etc/rc.d/rc.local
+    user_and_group_names="hadoop:hadoop"
+fi
 
 cat >> $firstboot_script_name <<EOF
 #!/bin/sh

View File

@@ -28,7 +28,9 @@ elif [ "${major_v}" == "1" -a "${minor_v}" == "0" ]; then
     print_deprecation_warning
 fi
 
-if [ -z "${SPARK_DOWNLOAD_URL:-}" -a -z "${DIB_CDH_VERSION:-}" ]; then
-    echo -e "Neither DIB_CDH_VERSION nor SPARK_DOWNLOAD_URL are set. Impossible to install Spark.\nAborting"
-    exit 1
+if [ "$plugin_type" == "spark" ]; then
+    if [ -z "${SPARK_DOWNLOAD_URL:-}" -a -z "${DIB_CDH_VERSION:-}" ]; then
+        echo -e "Neither DIB_CDH_VERSION nor SPARK_DOWNLOAD_URL are set. Impossible to install Spark.\nAborting"
+        exit 1
+    fi
 fi

View File

@@ -13,25 +13,27 @@ mkdir -p $tmp_dir
 if [ -z "${SPARK_DOWNLOAD_URL:-}" ]; then
     # Check hadoop version
     # INFO on hadoop versions: http://spark.apache.org/docs/latest/hadoop-third-party-distributions.html
-    case "$DIB_CDH_VERSION" in
-        5.0)
-            SPARK_HADOOP_DL=hadoop2.3
-            ;;
-        5.3)
-            SPARK_HADOOP_DL=hadoop2.4
-            ;;
-        5.4)
-            SPARK_HADOOP_DL=hadoop2.6
-            ;;
-        CDH4)
-            SPARK_HADOOP_DL=cdh4
-            ;;
-        *)
-            echo "WARNING: Cloudera CDH version $DIB_CDH_VERSION not supported."
-            echo "WARNING: use the SPARK_DOWNLOAD_URL variable to install a custom Spark version."
-            exit 1
-            ;;
-    esac
+    if [ -z "${SPARK_HADOOP_DL:-}" ]; then
+        case "${DIB_CDH_VERSION:-}" in
+            5.0)
+                SPARK_HADOOP_DL=hadoop2.3
+                ;;
+            5.3)
+                SPARK_HADOOP_DL=hadoop2.4
+                ;;
+            5.4)
+                SPARK_HADOOP_DL=hadoop2.6
+                ;;
+            CDH4)
+                SPARK_HADOOP_DL=cdh4
+                ;;
+            *)
+                echo "WARNING: Cloudera CDH $DIB_CDH_VERSION not supported."
+                echo "WARNING: use the SPARK_DOWNLOAD_URL variable to install a custom Spark version."
+                exit 1
+                ;;
+        esac
+    fi
 
     SPARK_DOWNLOAD_URL="http://archive.apache.org/dist/spark/spark-$DIB_SPARK_VERSION/spark-$DIB_SPARK_VERSION-bin-$SPARK_HADOOP_DL.tgz"
 fi
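
The new guard gives the install script a three-level precedence for choosing the Spark tarball. The helper below is a hypothetical summary, not part of the commit; it only restates the logic of the script above:

    # Hypothetical helper summarizing the selection order introduced here.
    resolve_spark_url() {
        if [ -n "${SPARK_DOWNLOAD_URL:-}" ]; then
            # 1. An explicit URL always wins; nothing else is consulted.
            echo "$SPARK_DOWNLOAD_URL"
        elif [ -n "${SPARK_HADOOP_DL:-}" ]; then
            # 2. A pre-set SPARK_HADOOP_DL (as the vanilla build now exports) skips the CDH lookup.
            echo "http://archive.apache.org/dist/spark/spark-$DIB_SPARK_VERSION/spark-$DIB_SPARK_VERSION-bin-$SPARK_HADOOP_DL.tgz"
        else
            # 3. Otherwise DIB_CDH_VERSION is mapped to a hadoop build by the case statement,
            #    and an unsupported value aborts the build.
            return 1
        fi
    }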