Make image_list.sh emit the etcd3 tarball

We need etcd for every devstack run now, so downloading it from
GitHub every time is wasteful.

Add an extra variable, EXTRA_CACHE_URLS, whose entries are appended to
the output of tools/image_list.sh.  This way these files will be
downloaded during the daily nodepool build, but they will not be in
IMAGE_LIST and hence will not be considered images to upload.

Add a function, get_extra_file, which echoes the local path to a file
given its URL.  It first checks the cache in $FILES and downloads the
file if it is not already present.
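
In practice the pattern looks like this (condensed from the stackrc
and lib/etcd3 hunks below):

    # stackrc: advertise the tarball URL so CI can pre-cache it in $FILES
    EXTRA_CACHE_URLS+=",$ETCD_DOWNLOAD_LOCATION"

    # lib/etcd3: resolve the local path (cache hit, or download on demand)
    etcd_file="$(get_extra_file $ETCD_DOWNLOAD_LOCATION)"
    tar xzvf $etcd_file -C $FILES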

Update the documentation in image_list.sh to reflect what's happening.

Move the defaults for etcd variables into stackrc, since it is a base
service now.

Change-Id: I86104824a29d973a6288df1f24b7891feb86267c
Monty Taylor 2017-09-03 12:13:59 -05:00 committed by Ian Wienand
parent 7e9ec03af4
commit d8bb220606
4 changed files with 89 additions and 31 deletions

functions-common

@@ -45,6 +45,37 @@ function short_source {
 # export it so child shells have access to the 'short_source' function also.
 export -f short_source

+# Download a file from a URL
+#
+# Will check cache (in $FILES) or download given URL.
+#
+# Argument is the URL to the remote file
+#
+# Will echo the local path to the file as the output. Will die on
+# failure to download.
+#
+# Files can be pre-cached for CI environments, see EXTRA_CACHE_URLS
+# and tools/image_list.sh
+function get_extra_file {
+    local file_url=$1
+
+    file_name=$(basename "$file_url")
+    if [[ $file_url != file* ]]; then
+        # If the file isn't cached, download it
+        if [[ ! -f $FILES/$file_name ]]; then
+            wget --progress=dot:giga -c $file_url -O $FILES/$file_name
+            if [[ $? -ne 0 ]]; then
+                die "$file_url could not be downloaded"
+            fi
+        fi
+        echo "$FILES/$file_name"
+        return
+    else
+        # just strip the file:// bit and that's the path to the file
+        echo $file_url | sed 's|^file://||'
+    fi
+}
+
 # Retrieve an image from a URL and upload into Glance.
 # Uses the following variables:
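
For illustration, any other caller can use the same helper; this is a
sketch with a made-up URL and variable names (the real caller added by
this commit is lib/etcd3 below):

    # Hypothetical example: MYTOOL_URL is not a real devstack variable.
    MYTOOL_URL="https://example.com/downloads/mytool-1.0.tar.gz"
    # Returns $FILES/mytool-1.0.tar.gz, either from the CI cache or freshly fetched.
    mytool_file="$(get_extra_file $MYTOOL_URL)"
    tar xzvf $mytool_file -C $DEST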

lib/etcd3

@@ -24,15 +24,9 @@ set +o xtrace
 # --------
 # Set up default values for etcd
-ETCD_DOWNLOAD_URL=${ETCD_DOWNLOAD_URL:-https://github.com/coreos/etcd/releases/download}
-ETCD_VERSION=${ETCD_VERSION:-v3.1.7}
 ETCD_DATA_DIR="$DATA_DIR/etcd"
 ETCD_SYSTEMD_SERVICE="devstack@etcd.service"
 ETCD_BIN_DIR="$DEST/bin"
-ETCD_SHA256_AMD64="4fde194bbcd259401e2b5c462dfa579ee7f6af539f13f130b8f5b4f52e3b3c52"
-# NOTE(sdague): etcd v3.1.7 doesn't have anything for these architectures, though 3.2.0 does.
-ETCD_SHA256_ARM64=""
-ETCD_SHA256_PPC64=""
 ETCD_PORT=2379

 if is_ubuntu ; then
@@ -95,37 +89,19 @@ function cleanup_etcd3 {
 function install_etcd3 {
     echo "Installing etcd"

-    # Make sure etcd3 downloads the correct architecture
-    if is_arch "x86_64"; then
-        ETCD_ARCH="amd64"
-        ETCD_SHA256=${ETCD_SHA256:-$ETCD_SHA256_AMD64}
-    elif is_arch "aarch64"; then
-        ETCD_ARCH="arm64"
-        ETCD_SHA256=${ETCD_SHA256:-$ETCD_SHA256_ARM64}
-    elif is_arch "ppc64le"; then
-        ETCD_ARCH="ppc64le"
-        ETCD_SHA256=${ETCD_SHA256:-$ETCD_SHA256_PPC64}
-    else
-        exit_distro_not_supported "invalid hardware type - $ETCD_ARCH"
-    fi
-
-    ETCD_NAME=etcd-$ETCD_VERSION-linux-$ETCD_ARCH
-
     # Create the necessary directories
     sudo mkdir -p $ETCD_BIN_DIR
     sudo mkdir -p $ETCD_DATA_DIR

     # Download and cache the etcd tgz for subsequent use
+    local etcd_file
+    etcd_file="$(get_extra_file $ETCD_DOWNLOAD_LOCATION)"
     if [ ! -f "$FILES/etcd-$ETCD_VERSION-linux-$ETCD_ARCH/etcd" ]; then
-        ETCD_DOWNLOAD_FILE=$ETCD_NAME.tar.gz
-        if [ ! -f "$FILES/$ETCD_DOWNLOAD_FILE" ]; then
-            wget $ETCD_DOWNLOAD_URL/$ETCD_VERSION/$ETCD_DOWNLOAD_FILE -O $FILES/$ETCD_DOWNLOAD_FILE
-        fi
-        echo "${ETCD_SHA256} $FILES/${ETCD_DOWNLOAD_FILE}" > $FILES/etcd.sha256sum
+        echo "${ETCD_SHA256} $etcd_file" > $FILES/etcd.sha256sum
         # NOTE(sdague): this should go fatal if this fails
         sha256sum -c $FILES/etcd.sha256sum

-        tar xzvf $FILES/$ETCD_DOWNLOAD_FILE -C $FILES
+        tar xzvf $etcd_file -C $FILES
         sudo cp $FILES/$ETCD_NAME/etcd $ETCD_BIN_DIR/etcd
     fi
     if [ ! -f "$ETCD_BIN_DIR/etcd" ]; then

stackrc

@@ -732,6 +732,40 @@ if [[ "$DOWNLOAD_DEFAULT_IMAGES" == "True" ]]; then
     DOWNLOAD_DEFAULT_IMAGES=False
 fi

+# This is a comma-separated list of extra URLs to be listed for
+# download by the tools/image_list.sh script. CI environments can
+# pre-download these URLs and place them in $FILES. Later scripts can
+# then use "get_extra_file <url>", which prints out the path to the
+# file; the file is either downloaded on demand or taken from the
+# cache if it is already there.
+EXTRA_CACHE_URLS=""
+
+# etcd3 defaults
+ETCD_VERSION=${ETCD_VERSION:-v3.1.7}
+ETCD_SHA256_AMD64="4fde194bbcd259401e2b5c462dfa579ee7f6af539f13f130b8f5b4f52e3b3c52"
+# NOTE(sdague): etcd v3.1.7 doesn't have anything for these architectures, though 3.2.0 does.
+ETCD_SHA256_ARM64=""
+ETCD_SHA256_PPC64=""
+# Make sure etcd3 downloads the correct architecture
+if is_arch "x86_64"; then
+    ETCD_ARCH="amd64"
+    ETCD_SHA256=${ETCD_SHA256:-$ETCD_SHA256_AMD64}
+elif is_arch "aarch64"; then
+    ETCD_ARCH="arm64"
+    ETCD_SHA256=${ETCD_SHA256:-$ETCD_SHA256_ARM64}
+elif is_arch "ppc64le"; then
+    ETCD_ARCH="ppc64le"
+    ETCD_SHA256=${ETCD_SHA256:-$ETCD_SHA256_PPC64}
+else
+    exit_distro_not_supported "invalid hardware type - $ETCD_ARCH"
+fi
+
+ETCD_DOWNLOAD_URL=${ETCD_DOWNLOAD_URL:-https://github.com/coreos/etcd/releases/download}
+ETCD_NAME=etcd-$ETCD_VERSION-linux-$ETCD_ARCH
+ETCD_DOWNLOAD_FILE=$ETCD_NAME.tar.gz
+ETCD_DOWNLOAD_LOCATION=$ETCD_DOWNLOAD_URL/$ETCD_VERSION/$ETCD_DOWNLOAD_FILE
+# etcd is always required, so place it into the list of pre-cached downloads
+EXTRA_CACHE_URLS+=",$ETCD_DOWNLOAD_LOCATION"

 # Detect duplicate values in IMAGE_URLS
 for image_url in ${IMAGE_URLS//,/ }; do
     if [ $(echo "$IMAGE_URLS" | grep -o -F "$image_url" | wc -l) -gt 1 ]; then
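
With these defaults on x86_64, the appended entry expands to the full
GitHub release URL; a worked expansion:

    ETCD_NAME=etcd-v3.1.7-linux-amd64
    ETCD_DOWNLOAD_LOCATION=https://github.com/coreos/etcd/releases/download/v3.1.7/etcd-v3.1.7-linux-amd64.tar.gz
    EXTRA_CACHE_URLS=",https://github.com/coreos/etcd/releases/download/v3.1.7/etcd-v3.1.7-linux-amd64.tar.gz"

Other always-needed files could presumably be appended to
EXTRA_CACHE_URLS the same way.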

tools/image_list.sh

@@ -1,5 +1,14 @@
 #!/bin/bash

+# Print out a list of image and other files to download for caching.
+# This is mostly used by the OpenStack infrastructure during daily
+# image builds to save the large images to /opt/cache/files (see [1]).
+#
+# The two lists of URLs downloaded are IMAGE_URLS and
+# EXTRA_CACHE_URLS, which are set up in stackrc.
+#
+# [1] project-config:nodepool/elements/cache-devstack/extra-data.d/55-cache-devstack-repos

 # Keep track of the DevStack directory
 TOP_DIR=$(cd $(dirname "$0")/.. && pwd)
@@ -31,12 +40,20 @@ for driver in $DRIVERS; do
     ALL_IMAGES+=$URLS
 done

-# Make a nice list
-echo $ALL_IMAGES | tr ',' '\n' | sort | uniq
-
 # Sanity check - ensure we have a minimum number of images
 num=$(echo $ALL_IMAGES | tr ',' '\n' | sort | uniq | wc -l)
 if [[ "$num" -lt 4 ]]; then
     echo "ERROR: We only found $num images in $ALL_IMAGES, which can't be right."
     exit 1
 fi

+# These are extra non-image files that we want pre-cached. They are
+# kept in a separate list because devstack loops over IMAGE_LIST to
+# upload files to glance, and these aren't images. (This was a bit of
+# an after-thought, which is why the naming around this is very
+# image-centric.)
+URLS=$(source $TOP_DIR/stackrc && echo $EXTRA_CACHE_URLS)
+ALL_IMAGES+=$URLS
+
+# Make a nice combined list
+echo $ALL_IMAGES | tr ',' '\n' | sort | uniq
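
A sketch of how a CI cache job could consume this output (illustrative
only; the actual consumer is the project-config element referenced in
[1] above):

    # Pre-download everything the script prints into the image cache.
    for url in $(./tools/image_list.sh); do
        wget -c "$url" -P /opt/cache/files/
    done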