allow get_extra_file to retry on common errors
This change adds 4 new variables to devstack to allow controlling
the behavior of downloading files via get_extra_file:
EXTRA_FILES_RETRY=${EXTRA_FILES_RETRY:-3}
EXTRA_FILES_RETRY_ERRORS=${EXTRA_FILES_RETRY_ERRORS:-"500,503"}
EXTRA_FILES_DOWNLOAD_TIMEOUT=${EXTRA_FILES_DOWNLOAD_TIMEOUT:-2}
EXTRA_FILES_RETRY_TIMEOUT=${EXTRA_FILES_RETRY_TIMEOUT:-10}
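
As an illustrative sketch (the values are arbitrary, not recommendations),
these can be overridden in devstack's local.conf:

[[local|localrc]]
EXTRA_FILES_RETRY=5
EXTRA_FILES_RETRY_ERRORS="500,502,503"
EXTRA_FILES_DOWNLOAD_TIMEOUT=5
EXTRA_FILES_RETRY_TIMEOUT=30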
get_extra_file checks if a file is precached and downloads
it if not present.
The new functionality modifies the wget invocation to retry
up to 3 times on DNS name issues, connection refused errors,
or a 500 or 503 response from the server.
This change will not retry on 403 or 429 error codes,
which are often used to indicate rate limits.
The download timeout now defaults to 2 seconds and
can be set if required. A 10 second overall retry
wait timeout (--waitretry) is added, matching wget's default.
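
With the defaults above, the resulting wget invocation is roughly:

# composed from the flags added in this change; exact ordering illustrative
wget --progress=dot:giga --retry-on-host-error --retry-on-http-error=500,503 \
    --timeout=2 --waitretry=10 --tries=3 --retry-connrefused \
    -c $file_url -O $FILES/$file_name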
Finally, the use of sed to strip the file:// prefix
if present is replaced with Bash parameter expansion:
echo "${file_url#file://}"
Assisted-By: gemini pro 3
Change-Id: Iedbb8974dce34a3d1c89f2a2561df76e696afae6
Signed-off-by: Sean Mooney <work@seanmooney.info>
diff --git a/functions b/functions
--- a/functions
+++ b/functions
@@ -47,6 +47,10 @@ function short_source {
 # export it so child shells have access to the 'short_source' function also.
 export -f short_source
 
+EXTRA_FILES_RETRY=${EXTRA_FILES_RETRY:-3}
+EXTRA_FILES_RETRY_ERRORS=${EXTRA_FILES_RETRY_ERRORS:-"500,503"}
+EXTRA_FILES_DOWNLOAD_TIMEOUT=${EXTRA_FILES_DOWNLOAD_TIMEOUT:-2}
+EXTRA_FILES_RETRY_TIMEOUT=${EXTRA_FILES_RETRY_TIMEOUT:-10}
 # Download a file from a URL
 #
 # Will check cache (in $FILES) or download given URL.
@@ -55,17 +59,20 @@ export -f short_source
 #
 # Will echo the local path to the file as the output. Will die on
 # failure to download.
 #
 # Files can be pre-cached for CI environments, see EXTRA_CACHE_URLS
 # and tools/image_list.sh
 function get_extra_file {
     local file_url=$1
-
-    file_name=$(basename "$file_url")
+    local retry_args="--retry-on-host-error --retry-on-http-error=${EXTRA_FILES_RETRY_ERRORS} "
+    retry_args+="--timeout=${EXTRA_FILES_DOWNLOAD_TIMEOUT} --waitretry=${EXTRA_FILES_RETRY_TIMEOUT} "
+    retry_args+="--tries=${EXTRA_FILES_RETRY} --retry-connrefused"
+    # Using Bash parameter expansion (##*/) instead of external 'basename'
+    local file_name="${file_url##*/}"
     if [[ $file_url != file* ]]; then
         # If the file isn't cache, download it
         if [[ ! -f $FILES/$file_name ]]; then
-            wget --progress=dot:giga -t 2 -c $file_url -O $FILES/$file_name
+            wget --progress=dot:giga ${retry_args} -c $file_url -O $FILES/$file_name
             if [[ $? -ne 0 ]]; then
                 die "$file_url could not be downloaded"
             fi
@@ -74,7 +81,7 @@ function get_extra_file {
         return
     else
         # just strip the file:// bit and that's the path to the file
-        echo $file_url | sed 's/$file:\/\///g'
+        echo "${file_url#file://}"
     fi
 }
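
As a usage sketch (the URL is hypothetical), callers capture the
path echoed by the function:

image_path=$(get_extra_file https://example.com/images/test.img)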