add download_file function in devstack plugin.sh

1. Add a download_file function to the devstack plugin.sh so that files
already downloaded are not fetched again on reinstall.
2. When unstack.sh runs, the Spark download dir is removed with rm -rf,
so the downloaded files are deleted too. Change the Spark download dir to
the devstack default files dir to avoid this problem.
3. Change "sudo -u stack" to "sudo -u stack -g stack" to avoid the
"user stack is not allowed to execute" problem.
Blueprint add-downloadfile-function

Change-Id: I9f096c32c7b9a826541dca229ec1a294088e1e81
This commit is contained in:
sven mark 2017-02-23 10:01:56 +08:00 committed by Daisuke Fujita
parent f3d5ed114f
commit e722185a59
2 changed files with 25 additions and 7 deletions

View File

@ -140,7 +140,7 @@ function install_pkg {
sudo -E apt-get -y install $JDK_PKG
## SCALA
sudo -E curl $SCALA_URL -o $SPARK_DOWNLOAD/$SCALA
download_through_cache $SCALA_URL $SCALA $SPARK_DOWNLOAD
sudo -E dpkg -i $SPARK_DOWNLOAD/$SCALA
echo "deb https://dl.bintray.com/sbt/debian /" | sudo -E tee -a /etc/apt/sources.list.d/sbt.list
sudo -E apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv $KEYID
@ -155,9 +155,9 @@ function install_pkg {
###
function build_spark {
## install maven
sudo -E curl $MAVEN_URL -o $SPARK_DOWNLOAD/$MAVEN_TARBALL
download_through_cache $MAVEN_URL $MAVEN_TARBALL $SPARK_DOWNLOAD
sudo chown stack:stack $SPARK_DOWNLOAD/$MAVEN_TARBALL
sudo -u stack tar -xzf $SPARK_DOWNLOAD/$MAVEN_TARBALL -C $SPARK_DIR
sudo -u stack -g stack tar -xzf $SPARK_DOWNLOAD/$MAVEN_TARBALL -C $SPARK_DIR
if [ ${http_proxy} ];then
read HTTP_PROXY_USER_NAME HTTP_PROXY_PASSWORD HTTP_PROXY_HOST<< END
@ -177,9 +177,9 @@ END
fi
## Build Spark
sudo -E curl $SPARK_URL -o $SPARK_DOWNLOAD/${SPARK_TARBALL_NAME}
download_through_cache $SPARK_URL ${SPARK_TARBALL_NAME} $SPARK_DOWNLOAD
sudo chown stack:stack $SPARK_DOWNLOAD/${SPARK_TARBALL_NAME}
sudo -u stack tar -xzf $SPARK_DOWNLOAD/${SPARK_TARBALL_NAME} -C $SPARK_DIR
sudo -u stack -g stack tar -xzf $SPARK_DOWNLOAD/${SPARK_TARBALL_NAME} -C $SPARK_DIR
DEVSTACK_DIR=`pwd`
cd $SPARK_DIR/spark-${SPARK_VERSION}
@ -221,7 +221,7 @@ function install_kafka {
sudo groupadd --system kafka || true
sudo useradd --system -g kafka kafka || true
sudo -E curl $KAFKA_URL -o $SPARK_DOWNLOAD/$KAFKA_TARBALL
download_through_cache $KAFKA_URL $KAFKA_TARBALL $SPARK_DOWNLOAD
sudo tar -xzf $SPARK_DOWNLOAD/$KAFKA_TARBALL -C /opt
sudo ln -sf /opt/kafka_${KAFKA_VERSION} /opt/kafka
@ -294,6 +294,22 @@ function extra_monasca_analytics {
:
}
###
# Fetch a resource into a cache directory, skipping the download when the
# file is already present so re-running stack.sh does not re-fetch tarballs.
# Arguments:
#   $1 - resource_location: URL to download from
#   $2 - resource_name:     local file name to check for / save as
#   $3 - download_dir:      cache directory (created and chowned if missing)
# Globals:
#   DOWNLOAD_FILE_TIMEOUT (read) - curl max-time in seconds
###
function download_through_cache {
    local resource_location=$1
    local resource_name=$2
    local download_dir=$3
    if [[ ! -d "${download_dir}" ]]; then
        _safe_permission_operation mkdir -p "${download_dir}"
        _safe_permission_operation chown stack "${download_dir}"
    fi
    # Quiet pushd/popd so directory-stack chatter does not pollute the log.
    pushd "${download_dir}" > /dev/null
    if [[ ! -f "${resource_name}" ]]; then
        # -m bounds total transfer time; retry transient failures with a delay.
        sudo -E curl -m "${DOWNLOAD_FILE_TIMEOUT}" --retry 3 --retry-delay 5 "${resource_location}" -o "${resource_name}"
    fi
    popd > /dev/null
}
# check for service enabled
echo_summary "Monasca-analytics plugin with service enabled = `is_service_enabled monasca-analytics`"

View File

@ -28,13 +28,15 @@ enable_service monasca-analytics
# Dependent Software Versions
#
DOWNLOAD_FILE_TIMEOUT=${DOWNLOAD_FILE_TIMEOUT:-1800}
# spark vars
SPARK_DIRECTORIES=("/var/spark" "/var/log/spark" "/var/run/spark/work" "/etc/spark/conf" "/etc/spark/init" )
JDK_PKG="openjdk-8-jre-headless openjdk-8-jdk"
MAVEN="apache-maven-3.5.3"
MAVEN_TARBAL="$MAVEN-bin.tar.gz"
MAVEN_TARBALL="$MAVEN-bin.tar.gz"
MAVEN_URL="https://archive.apache.org/dist/maven/maven-3/3.5.3/binaries/$MAVEN_TARBALL"
SCALA_VERSION=${SCALA_VERSION:-2.11}