Ignore docker image build errors in main build

Currently errors in docker image builds fail the entire build. Docker
images are in flux, and are likely to cause every build to fail.

Workaround: don't fail the main build if some docker images failed (but
print out a warning in Jenkins log).

Changes:
- Ignore docker image build errors with a warning
- Misc changes to scripts to make sure various steps happen in the right
  order

Story: 2010226
Task: 46146

Signed-off-by: Davlet Panech <davlet.panech@windriver.com>
Change-Id: Ib8869ce263731f7bce3157890c303ec5cec59fde
This commit is contained in:
Davlet Panech 2022-08-26 15:36:34 -04:00
parent 970cd6e068
commit 70b5f57491
12 changed files with 332 additions and 153 deletions

View File

@ -8,6 +8,10 @@
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
PROPS = null
IMG_PARAMS = null
IMAGES_FAILED = false
def parseProps(text) {
def x = {}
for (line in text.split (/\n+/)) {
@ -25,6 +29,7 @@ def parseProps(text) {
def loadEnv() {
def data = {}
data.NEED_BUILD = false
data.SUPPRESS_DOCKER_IMAGE_BUILD_ERRORS = true
ws(params.BUILD_HOME) {
if (fileExists ("NEED_BUILD")) {
data.NEED_BUILD = true
@ -34,12 +39,10 @@ def loadEnv() {
final props = parseProps (configText)
data.BUILD_OUTPUT_HOME_URL = props.BUILD_OUTPUT_HOME_URL
data.PUBLISH_URL = props.PUBLISH_URL
return data
PROPS = data
return data.NEED_BUILD
}
def PROPS = null
def IMG_PARAMS = null
def partJobName (name) {
final String folder = env.JOB_NAME.replaceAll (/(.*\/).+$/, '$1');
if (folder == env.JOB_NAME) {
@ -52,16 +55,44 @@ def runPart (name, params = []) {
build job: partJobName (name), parameters: copyCurrentParams() + params
}
def printBuildFooter(final props) {
if (props) {
// Run a docker-image-related child job, tolerating failures.
//
// Once any image job has failed, IMAGES_FAILED stays true and all
// subsequent image jobs are skipped (the guard below).
// When PROPS.SUPPRESS_DOCKER_IMAGE_BUILD_ERRORS is set, the child job is
// launched with propagate=false, so its failure does not fail this
// (parent) build; we only log a warning and latch IMAGES_FAILED.
//
// Returns true if the child job finished SUCCESS or UNSTABLE,
// false if it failed or was skipped due to an earlier failure.
def runImagesPart (name, params = []) {
if (!IMAGES_FAILED) {
final jobName = partJobName (name)
final res = build (
job: jobName,
parameters: copyCurrentParams() + IMG_PARAMS + params,
propagate: ! PROPS.SUPPRESS_DOCKER_IMAGE_BUILD_ERRORS
).result
// An aborted child is treated as a user-requested stop: abort the
// whole pipeline rather than continuing with remaining image jobs.
if (res == 'ABORTED') {
// FIXME: make current build ABORTED here
error ("child job ${jobName} aborted")
}
if (res == 'SUCCESS' || res == 'UNSTABLE') {
return true
}
// Child failed: warn in the log and disable all later image jobs.
print ("*** ERROR: child job ${jobName} failed!")
IMAGES_FAILED = true
}
return false
}
def printBuildFooter() {
if (PROPS) {
String msg = ""
msg += "\n"
msg += "========================================\n"
msg += "\n"
if (props.NEED_BUILD) {
msg += "Build output: ${props.BUILD_OUTPUT_HOME_URL}\n"
if (props.PUBLISH_URL) {
msg += "Publish output: ${props.PUBLISH_URL}\n"
if (PROPS.NEED_BUILD) {
msg += "Build output: ${PROPS.BUILD_OUTPUT_HOME_URL}\n"
if (PROPS.PUBLISH_URL) {
msg += "Publish output: ${PROPS.PUBLISH_URL}\n"
}
if (IMAGES_FAILED) {
msg += "\n"
msg += "WARNING:\n"
msg += "WARNING: docker images build attempted, but failed!\n"
msg += "WARNING: see log output above\n"
msg += "WARNING:\n"
}
}
else {
@ -151,9 +182,6 @@ pipeline {
booleanParam (
name: 'PUSH_DOCKER_IMAGES'
)
booleanParam (
name: 'BUILD_HELM_CHARTS'
)
booleanParam (
name: 'IMPORT_BUILD'
)
@ -172,34 +200,47 @@ pipeline {
stage('INIT') {
steps {
script {
// Initialize BUILD_HOME, create build.conf & stx.conf
runPart ("init-env")
runPart ("stop-containers")
// Update source tree
runPart ("clone-source")
// create BUILD & stx.conf
runPart ("configure-build")
// Stop containers before updating source tree
runPart ("stop-containers")
// Create changelog, LAST_COMMITS, NEED_BUILD, etc.
runPart ("create-changelog")
PROPS = loadEnv()
if (!PROPS.NEED_BUILD) {
// Is build required?
if (loadEnv()) {
IMG_PARAMS = [ string (name: 'BUILD_STREAM', value: 'stable') ]
}
else {
println "*** NO CHANGES, BUILD NOT REQUIRED ***"
}
IMG_PARAMS = [ string (name: 'BUILD_STREAM', value: 'stable') ]
}
}
}
// This stage runs only if build is required
stage('X0') {
when { expression { PROPS.NEED_BUILD } }
stages {
stage('PREPARE') {
steps {
// Delete or keep packages, aptly state, etc depending on build params
runPart ("clean-build")
runPart ("configure-build")
// start containers
runPart ("start-containers")
// login to docker early to catch login errors
runPart ("docker-login")
}
}
// populate mirror/
stage('DOWNLOAD') {
steps {
runPart ("download-prerequisites")
}
}
// build packages
stage('PACKAGES') {
when { expression { params.BUILD_PACKAGES } }
steps {
@ -207,74 +248,92 @@ pipeline {
runPart ("publish-packages")
}
}
// Generate initial helm charts. We will re-generate them after
// building docker images, if requested in order to replace
// image tags by locally-built images
stage('HELM:initial') {
steps {
runPart ("build-helm-charts", IMG_PARAMS)
runPart ("publish-helm-charts", IMG_PARAMS)
}
}
// Build ISO & images in parallel
stage('X1') { parallel {
stage('ISO') {
when { expression { params.BUILD_ISO } }
steps {
steps { script {
runPart ("build-iso")
runPart ("publish-iso")
}
} }
} // stage('ISO')
stage('IMAGES') {
when { expression { params.BUILD_DOCKER_BASE_IMAGE || params.BUILD_DOCKER_IMAGES } }
stages {
stage('IMAGES:base') {
when { expression { params.BUILD_DOCKER_BASE_IMAGE } }
when { expression { ! IMAGES_FAILED && params.BUILD_DOCKER_BASE_IMAGE } }
steps { script {
runPart ("build-docker-base", IMG_PARAMS)
runImagesPart ("build-docker-base")
} }
}
stage('IMAGES:wheels') {
when { expression { params.BUILD_DOCKER_IMAGES } }
when { expression { ! IMAGES_FAILED && params.BUILD_DOCKER_IMAGES } }
steps { script {
runPart ("build-wheels", IMG_PARAMS)
runPart ("publish-wheels", IMG_PARAMS)
runImagesPart ("build-wheels")
runImagesPart ("publish-wheels")
} }
}
stage('IMAGES:images') {
when { expression { params.BUILD_DOCKER_IMAGES } }
when { expression { ! IMAGES_FAILED && params.BUILD_DOCKER_IMAGES } }
steps { script {
runPart ("build-docker-images", IMG_PARAMS)
runPart ("publish-docker-images", IMG_PARAMS)
runImagesPart ("build-docker-images")
runImagesPart ("publish-docker-images")
} }
}
stage('IMAGES:helm') {
when { expression { params.BUILD_DOCKER_IMAGES && params.BUILD_HELM_CHARTS } }
// Rebuild helm charts even if image builds failed.
// This will record any images that were built successfully in the helm charts
when { expression { params.BUILD_DOCKER_IMAGES } }
steps { script {
runPart ("build-helm-charts", IMG_PARAMS)
runPart ("publish-helm-charts", IMG_PARAMS)
} }
}
}
stage('IMAGES:symlinks') {
when { expression { ! IMAGES_FAILED } }
steps { script {
// copy image lists to publish root and create the "latest_docker_image_build" symlinks
// in publish and archive roots
sh ("BUILD_STATUS=success ${Constants.SCRIPTS_DIR}/create-latest-containers-symlinks.sh")
} }
}
} // stages
} // stage('IMAGES')
} }// stage('X1')
} // stages
post {
always {
echo "build result: ${currentBuild.result}"
runPart ("stop-containers")
notAborted {
runPart ("archive-misc")
runPart ("archive-misc") // archive anything we may have missed
saveCurrentJenkinsBuildInfo() // save this job's build number on disk (for publish-logs)
}
}
success {
// copy LAST_COMMITS to archive root & update the "latest_build" symlink in
// both archive and publish roots
sh ("BUILD_STATUS=success ${Constants.SCRIPTS_DIR}/create-latest-symlinks.sh")
printBuildFooter() // Print archive & publish URLs
runPart ("publish-logs") // publish this job's Jenkins log
}
unsuccessful {
unsuccessful { notAborted {
sh ("BUILD_STATUS=fail ${Constants.SCRIPTS_DIR}/create-latest-symlinks.sh")
}
runPart ("publish-logs") // publish this job's Jenkins log
} }
}
} // stage X0
} // stages
post {
cleanup {
saveCurrentJenkinsBuildInfo()
notAborted {
runPart ("publish-logs")
}
printBuildFooter (PROPS)
}
}
}

View File

@ -10,12 +10,12 @@ set -e
source $(dirname "$0")/lib/job_utils.sh
load_build_env
require_env BUILD_ISO
LAT_SUBDIR="localdisk/deploy"
#VERBOSE_ARG="--verbose"
$BUILD_ISO || bail "BUILD_ISO=false, bailing out"
declare -a iso_files
@ -27,9 +27,7 @@ if [[ -d "${src_dir}" ]] ; then
notice "archving $src_dir"
mkdir -p "$dst_dir"
safe_copy_dir $DRY_RUN_ARG $VERBOSE_ARG "${src_dir}/" "${dst_dir}/"
if [[ -e "${dst_dir}/deploy" ]] ; then
iso_files+=($(find "${dst_dir}/deploy" -mindepth 1 -maxdepth 1 -type f))
fi
iso_files+=($(find "${dst_dir}" -mindepth 1 -maxdepth 1 -type f))
fi
if [[ "${#iso_files[@]}" -gt 0 ]] ; then

View File

@ -6,6 +6,12 @@
# SPDX-License-Identifier: Apache-2.0
#
# Archive everything except:
# - symlinks that point to $BUILD_OUTPUT_HOME
# - localdisk/deploy/ which is archived separately by archive-iso.sh
# - large temp dirs left behind by ostree
#
set -e
source $(dirname "$0")/lib/job_utils.sh
@ -13,27 +19,19 @@ load_build_env
#VERBOSE_ARG="--verbose"
exclude_args=()
exclude_args+=(--exclude "/localdisk/designer/**") # symlink inside
exclude_args+=(--exclude "/aptly") # symlink
exclude_args+=(--exclude "/mirrors") # symlink
exclude_args+=(--exclude "/docker") # symlink
exclude_args+=(--exclude "/workspace") # symlink
exclude_args+=(--exclude "/repo") # symlink
exclude_args+=(--exclude "/localdisk/workdir/**") # ostree temp files
exclude_args+=(--exclude "/localdisk/sub_workdir/workdir/**") # ostree temp files
exclude_args+=(--exclude "/localdisk/deploy/**") # archived by archive-iso.sh
mkdir -p "$BUILD_OUTPUT_HOME"
safe_copy_dir $DRY_RUN_ARG $VERBOSE_ARG \
--exclude /aptly \
--exclude /localdisk/channel/\*\* \
--exclude /localdisk/designer \
--exclude /mirrors \
--exclude /localdisk/lat \
"${exclude_args[@]}" \
"$BUILD_HOME/" "$BUILD_OUTPUT_HOME/"
# localdisk/loadbuild/$USER
mkdir -p "$BUILD_OUTPUT_HOME/$(dirname "$REPO_ROOT_SUBDIR")"
# localdisk/designer/$USER/$PROJECT => $BUILD_HOME/...
ln -sfn "$BUILD_HOME/$REPO_ROOT_SUBDIR" "$BUILD_OUTPUT_HOME/$REPO_ROOT_SUBDIR"
# repo => localdisk/designer/$USER/$PROJECT
ln -sfn "$REPO_ROOT_SUBDIR" "$BUILD_OUTPUT_HOME/repo"
# workspace => localdisk/loadbuild/$USER/$PROJECT
ln -sfn "$WORKSPACE_ROOT_SUBDIR" "$BUILD_OUTPUT_HOME/workspace"
# aptly => $BUILD_HOME/...
ln -sfn "$BUILD_HOME/aptly" "$BUILD_OUTPUT_HOME/aptly"

View File

@ -13,7 +13,6 @@ require_env BUILD_HOME
require_env BUILD_PACKAGES
declare_env BUILD_PACKAGES_LIST
require_env BUILD_RT
require_env CLEAN_PACKAGES
require_env BUILD_ISO
declare_env BUILD_PACKAGES_PARALLEL_JOBS
@ -22,7 +21,6 @@ load_build_env
$BUILD_PACKAGES || bail "BUILD_PACKAGES=false, skipping build"
BUILD_PACKAGES_LIST=$(trim $(echo $BUILD_PACKAGES_LIST | sed 's/,/ /g'))
info "CLEAN_PACKAGES=$CLEAN_PACKAGES"
info "BUILD_PACKAGES_LIST=$BUILD_PACKAGES_LIST"
# Always build std, rt only if requested
@ -43,12 +41,6 @@ declare -a extra_args
while [[ $count -lt $BUILD_PACKAGES_ITERATIONS ]] ; do
extra_args=()
# # clean on 1st iteration only if CLEAN_BUILD was set and we are building
# # specific packages
# if [[ $count == 0 ]] && $CLEAN_PACKAGES && [[ -n $BUILD_PACKAGES_LIST ]] ; then
# extra_args+=("-c")
# fi
# Either build specific or all packages
if [[ -n $BUILD_PACKAGES_LIST ]] ; then
extra_args+=("-p" "$(echo $BUILD_PACKAGES_LIST | sed 's/ /,/g')")

View File

@ -24,9 +24,9 @@ load_build_env
VERBOSE_ARG=
clean_or_import() {
local -a exclude_args
while [[ "$1" == "--exclude" ]] ; do
exclude_args+=("$1" "$2")
local -a exclude_toplevel_patterns
while [[ "$1" == "--exclude-toplevel" ]] ; do
exclude_toplevel_patterns+=("$2")
shift 2
done
local src_subdir="$1"
@ -35,6 +35,7 @@ clean_or_import() {
local allow_merge="${4:-false}"
local src_dir="$IMPORT_BUILD_DIR/$src_subdir"
local dst_dir="$BUILD_HOME/$dst_subdir"
if $IMPORT_BUILD && [[ -n "$IMPORT_BUILD_DIR" ]] && [[ -d "$src_dir" ]] ; then
local real_src_dir
real_src_dir="$(readlink -f "$src_dir")"
@ -42,16 +43,54 @@ clean_or_import() {
if ! $allow_merge ; then
delete_arg="--delete"
fi
notice "importing $src_subdir from $IMPORT_BUILD_DIR"
dst_dir="$BUILD_HOME/$dst_subdir"
local -a exclude_args
local exclude_msg_suffix
if [[ "${#exclude_toplevel_patterns[@]}" -gt 0 ]] ; then
local pattern
for pattern in "${exclude_toplevel_patterns[@]}" ; do
exclude_args+=("--exclude" "/$pattern")
done
exclude_msg_suffix=" except [${exclude_toplevel_patterns[*]}]"
fi
notice "importing $src_subdir from ${IMPORT_BUILD_DIR}/${exclude_msg_suffix}"
mkdir -p "$dst_dir"
safe_copy_dir $DRY_RUN_ARG $VERBOSE_ARG $delete_arg "${exclude_args[@]}" \
"$real_src_dir/" "$dst_dir/"
return
fi
if $clean_requested ; then
notice "removing $dst_subdir"
safe_rm $DRY_RUN_ARG $VERBOSE_ARG "$BUILD_HOME/$dst_subdir"/*
if $clean_requested && [[ -d "$dst_dir" ]] ; then
local exclude_msg_suffix
local -a find_args=(
"$dst_dir" -mindepth 1 -maxdepth 1
)
if [[ "${#exclude_toplevel_patterns[@]}" -gt 0 ]] ; then
local -i i
local -a find_exclude_args
for (( i=0 ; i<"${#exclude_toplevel_patterns[@]}" ; ++i)) ; do
local pattern="${exclude_toplevel_patterns[$i]}"
if [[ $i -gt 0 ]] ; then
find_exclude_args+=("-o")
fi
find_exclude_args+=("-name" "$pattern")
done
exclude_msg_suffix=" except [${exclude_toplevel_patterns[*]}]"
find_args+=('(' '(' "${find_exclude_args[@]}" ')' '-prune' ')' '-o')
fi
find_args+=('-print')
local find_res="$(find "${find_args[@]}")" || exit 1
local -a rm_files
if [[ -n "$find_res" ]] ; then
readarray -t rm_files <<<"$find_res" || exit 1
fi
if [[ "${#rm_files[@]}" -gt 0 ]] ; then
notice "removing all files from $dst_subdir/${exclude_msg_suffix}"
safe_rm $DRY_RUN_ARG $VERBOSE_ARG "${rm_files[@]}"
fi
fi
}
@ -77,24 +116,36 @@ if [[ -d "$BUILD_HOME/localdisk/loadbuild" ]] ; then
fi
fi
clean_or_import --exclude /meta-lat --exclude /tmp --exclude /sign \
"workspace" "$WORKSPACE_ROOT_SUBDIR" $CLEAN_PACKAGES
clean_or_import --exclude-toplevel 'BUILD' \
"workspace" "$WORKSPACE_ROOT_SUBDIR" $CLEAN_PACKAGES
clean_or_import "mirrors" "mirrors" $CLEAN_DOWNLOADS true
clean_or_import "aptly" "aptly" $CLEAN_REPOMGR
clean_or_import "docker" "docker" $CLEAN_DOCKER
clean_or_import "docker" "lat" $CLEAN_ISO
clean_or_import "mirrors" "mirrors" $CLEAN_DOWNLOADS true
clean_or_import "aptly" "aptly" $CLEAN_REPOMGR
clean_or_import "docker" "docker" $CLEAN_DOCKER
clean_or_import "localdisk/deploy" "localdisk/deploy" $CLEAN_ISO
# these files can't be imported, always delete them
notice "removing misc files"
safe_rm $DRY_RUN_ARG $VERBOSE_ARG \
misc_rm=(
"$BUILD_HOME"/localdisk/*.log \
"$BUILD_HOME"/localdisk/log \
"$BUILD_HOME"/localdisk/channel \
"$BUILD_HOME"/localdisk/deploy \
"$BUILD_HOME"/localdisk/pkgbuilder \
"$BUILD_HOME"/localdisk/workdir \
"$BUILD_HOME"/localdisk/sub_workdir \
"$BUILD_HOME"/localdisk/tmp \
"$BUILD_HOME"/lat \
\
"$BUILD_OUTPUT_HOME"/{SUCCESS,FAILURE,NEED_BUILD,NO_BUILD_REQUIRED,LAST_COMMITS*,CHANGES}
"$BUILD_HOME"/workspace/*.log \
"$BUILD_HOME"/workspace/std/build-images \
"$BUILD_HOME"/workspace/std/build-wheels* \
"$BUILD_HOME"/workspace/std/build-helm \
"$BUILD_HOME"/workspace/"export" \
"$BUILD_HOME"/workspace/helm-charts
)
rm_args=()
for path in "${misc_rm[@]}" ; do
[[ -e "$path" ]] || continue
rm_args+=("$path")
done
if [[ "${#rm_args[@]}" -gt 0 ]] ; then
notice "removing misc files"
safe_rm $DRY_RUN_ARG $VERBOSE_ARG "${rm_args[@]}"
fi

View File

@ -9,38 +9,8 @@
set -e
source $(dirname "$0")/lib/job_utils.sh
set -x
load_build_env
######################################################
# stx.conf
######################################################
rm -f stx.conf
unset DEBIAN_DISTRIBUTION DEBIAN_SNAPSHOT DEBIAN_SECURITY_SNAPSHOT
source ./import-stx
stx config --add builder.myuname "$USER"
stx config --add builder.uid "$USER_ID"
# Embedded in ~/localrc of the build container
stx config --add project.gituser "$USER_NAME"
stx config --add project.gitemail $USER_EMAIL
# This will be included in the name of your build container and the basename for $MY_REPO_ROOT_DIR
stx config --add project.name "$PROJECT"
stx config --add project.proxy false
# debian distro & urls
if [[ -n "$DEBIAN_SNAPSHOT_BASE" ]] ; then
stx config --add project.debian_snapshot_base "$DEBIAN_SNAPSHOT_BASE"
fi
if [[ -n "$DEBIAN_SECURITY_SNAPSHOT_BASE" ]] ; then
stx config --add project.debian_security_snapshot_base "$DEBIAN_SECURITY_SNAPSHOT_BASE"
fi
notice "$PWD/stx.conf"
cat stx.conf
######################################################
# BUILD file
######################################################
@ -81,3 +51,32 @@ _END
notice "$build_info_file"
cat "$build_info_file"
######################################################
# stx.conf
######################################################
cd "$BUILD_HOME/repo/stx-tools"
rm -f stx.conf
unset DEBIAN_DISTRIBUTION DEBIAN_SNAPSHOT DEBIAN_SECURITY_SNAPSHOT
source ./import-stx
stx config --add builder.myuname "$USER"
stx config --add builder.uid "$USER_ID"
# Embedded in ~/localrc of the build container
stx config --add project.gituser "$USER_NAME"
stx config --add project.gitemail $USER_EMAIL
# This will be included in the name of your build container and the basename for $MY_REPO_ROOT_DIR
stx config --add project.name "$PROJECT"
stx config --add project.proxy false
# debian distro & urls
if [[ -n "$DEBIAN_SNAPSHOT_BASE" ]] ; then
stx config --add project.debian_snapshot_base "$DEBIAN_SNAPSHOT_BASE"
fi
if [[ -n "$DEBIAN_SECURITY_SNAPSHOT_BASE" ]] ; then
stx config --add project.debian_security_snapshot_base "$DEBIAN_SECURITY_SNAPSHOT_BASE"
fi
notice "$PWD/stx.conf"
cat stx.conf

View File

@ -17,7 +17,16 @@ require_env BUILD_DOCKER_IMAGES_STABLE
load_build_env
rm -f "$BUILD_HOME"/{CHANGELOG*,LAST_COMMITS,NEED_BUILD,NO_BUILD_REQUIRED}
# Print the top-level changelog / build-marker entries directly under
# $BUILD_HOME: CHANGELOG*, LAST_COMMITS*, NEED_BUILD, NO_BUILD_REQUIRED.
find_changelog_files() {
    local marker
    local -a name_expr=()
    # Build the "-name A -o -name B ..." expression programmatically so the
    # marker list is stated once.
    for marker in 'CHANGELOG*' 'LAST_COMMITS*' 'NEED_BUILD' 'NO_BUILD_REQUIRED' ; do
        if [[ "${#name_expr[@]}" -gt 0 ]] ; then
            name_expr+=(-o)
        fi
        name_expr+=(-name "$marker")
    done
    find "$BUILD_HOME" -mindepth 1 -maxdepth 1 "${name_expr[@]}"
}
find_changelog_files | xargs -r rm -f
check_pipe_status
(
MY_WORKSPACE="$BUILD_HOME"
@ -28,3 +37,26 @@ rm -f "$BUILD_HOME"/{CHANGELOG*,LAST_COMMITS,NEED_BUILD,NO_BUILD_REQUIRED}
fi
)
if [[ -f "$BUILD_HOME/NEED_BUILD" ]] ; then
rm -f SUCCESS FAIL FAILURE
# BUILD_OUTPUT_HOME
mkdir -p "$BUILD_OUTPUT_HOME"
# repo
mkdir -p "$BUILD_OUTPUT_HOME/$(dirname "$REPO_ROOT_SUBDIR")"
ln -sfn "$BUILD_HOME/$REPO_ROOT_SUBDIR" "$BUILD_OUTPUT_HOME/$REPO_ROOT_SUBDIR"
ln -sfn "$REPO_ROOT_SUBDIR" "$BUILD_OUTPUT_HOME/repo"
# workspace
mkdir -p "$BUILD_OUTPUT_HOME/$WORKSPACE_ROOT_SUBDIR"
ln -sfn "$WORKSPACE_ROOT_SUBDIR" "$BUILD_OUTPUT_HOME/workspace"
# mirrors
ln -sfn "$BUILD_HOME/mirrors" "$BUILD_OUTPUT_HOME/mirrors"
# aptly
ln -sfn "$BUILD_HOME/aptly" "$BUILD_OUTPUT_HOME/aptly"
# docker
ln -sfn "$BUILD_HOME/docker" "$BUILD_OUTPUT_HOME/docker"
# copy changelog etc files
find_changelog_files | xargs -r cp -v -t "$BUILD_OUTPUT_HOME"
check_pipe_status
fi

View File

@ -0,0 +1,53 @@
#!/bin/bash
#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# On a successful build (BUILD_STATUS=success): copy the docker image list
# files to $PUBLISH_ROOT and update the "latest_docker_image_build"
# symlinks in the archive and publish roots. No-ops for DRY_RUN or when no
# image lists were produced.
set -e
source $(dirname "$0")/lib/job_utils.sh
require_env BUILD_STATUS
load_build_env
if $DRY_RUN ; then
bail "DRY_RUN=true, bailing out..."
fi
# NOTE(review): ARCHIVE_ROOT is re-assigned identically inside the "if"
# below; one of the two assignments looks redundant.
ARCHIVE_ROOT=$(dirname "$BUILD_OUTPUT_HOME")
set -x
if [[ "$BUILD_STATUS" == "success" ]] ; then
ARCHIVE_ROOT=$(dirname "$BUILD_OUTPUT_HOME")
link_target=$(basename "$BUILD_OUTPUT_HOME")
# find image list files (one path per line, whitespace-trimmed,
# comment/blank lines dropped)
# NOTE(review): the '^\s#' pattern likely intends '^\s*#' — as written it
# only drops lines starting with exactly one whitespace then '#'; confirm.
image_list_files_str="$(
find "$BUILD_OUTPUT_HOME/workspace/std/build-images" \
-mindepth 1 -maxdepth 1 \
-type f -name 'images-*.lst' \
| sed -r -e 's/^\s+//' -e 's/\s+$//' \
| grep -E -v -e '^\s#' -e '^\s*$'
)" || exit 1
declare -a image_list_files
readarray -t image_list_files <<<"$image_list_files_str"
# at least one image list found: record archive & publish artifacts
if [[ "${#image_list_files[@]}" -gt 0 ]] ; then
# archive LAST_COMMITS & latest symlink
cp "$BUILD_OUTPUT_HOME/LAST_COMMITS" "$ARCHIVE_ROOT/LAST_COMMITS_IMG_STABLE"
ln -sfn "$link_target" "$ARCHIVE_ROOT/latest_docker_image_build"
# publish image lists & latest symlink
cp "${image_list_files[@]}" "$PUBLISH_ROOT/"
# skip the publish symlink when publish and archive roots are the
# same physical directory (same_path comes from job_utils.sh)
if ! same_path "$ARCHIVE_ROOT" "$PUBLISH_ROOT" ; then
ln -sfn "$link_target" "$PUBLISH_ROOT/latest_docker_image_build"
fi
fi
fi

View File

@ -10,8 +10,6 @@ set -e
source $(dirname "$0")/lib/job_utils.sh
require_env BUILD_STATUS
require_env BUILD_DOCKER_IMAGES
require_env BUILD_DOCKER_BASE_IMAGE
load_build_env
@ -19,42 +17,26 @@ if $DRY_RUN ; then
bail "DRY_RUN=true, bailing out..."
fi
set -x
touch "$BUILD_OUTPUT_HOME/FAIL"
ARCHIVE_ROOT=$(dirname "$BUILD_OUTPUT_HOME")
same_path() {
if [[ ! -e "$1" && ! -e "$2" ]] ; then
return 1
fi
local a b
a="$(readlink -f "$1")" || exit 1
b="$(readlink -f "$2")" || exit 1
[[ "$a" == "$b" ]]
}
if [[ "$BUILD_STATUS" == "success" ]] ; then
ARCHIVE_ROOT=$(dirname "$BUILD_OUTPUT_HOME")
link_target=$(basename "$BUILD_OUTPUT_HOME")
cp "$BUILD_OUTPUT_HOME/LAST_COMMITS" "$ARCHIVE_ROOT/"
ln -sfn "$link_target" "$ARCHIVE_ROOT/latest_build"
if $BUILD_DOCKER_IMAGES || $BUILD_DOCKER_BASE_IMAGE ; then
cp "$BUILD_OUTPUT_HOME/LAST_COMMITS" "$ARCHIVE_ROOT/LAST_COMMITS_IMG_STABLE"
ln -sfn "$link_target" "$ARCHIVE_ROOT/latest_docker_image_build"
fi
rm -f "$BUILD_OUTPUT_HOME/FAIL"
touch "$BUILD_OUTPUT_HOME/SUCCESS"
mkdir -p "$PUBLISH_ROOT"
if ! same_path "$PUBLISH_ROOT" "$ARCHIVE_ROOT" ; then
link_target="${PUBLISH_ROOT}/$PUBLISH_TIMESTAMP"
if [[ -d "$link_target" ]] ; then
ln -sfn "$PUBLISH_TIMESTAMP" "$PUBLISH_ROOT/latest_build"
fi
if $BUILD_DOCKER_IMAGES || $BUILD_DOCKER_BASE_IMAGE ; then
ln -sfn "$link_target" "$PUBLISH_ROOT/latest_docker_image_build"
if [[ -d "$PUBLISH_ROOT/$TIMESTAMP" ]] ; then
mkdir -p "$PUBLISH_DIR/outputs"
touch "$PUBLISH_DIR/outputs/Success"
if ! same_path "$ARCHIVE_ROOT" "$PUBLISH_ROOT" ; then
ln -sfn "$link_target" "$PUBLISH_ROOT/latest_build"
fi
fi
fi

View File

@ -47,7 +47,12 @@ else
mkdir "$BUILD_HOME/jenkins"
fi
# Create symlinks
mkdir -p "$BUILD_HOME/$REPO_ROOT_SUBDIR" "$BUILD_HOME/$WORKSPACE_ROOT_SUBDIR"
# Create dirs & symlinks
mkdir -p "$BUILD_HOME/$REPO_ROOT_SUBDIR" \
"$BUILD_HOME/$WORKSPACE_ROOT_SUBDIR" \
"$BUILD_HOME/aptly" \
"$BUILD_HOME/mirrors" \
"$BUILD_HOME/docker"
ln -sfn "$REPO_ROOT_SUBDIR" "$BUILD_HOME/repo"
ln -sfn "$WORKSPACE_ROOT_SUBDIR" "$BUILD_HOME/workspace"

View File

@ -81,3 +81,13 @@ check_pipe_status() {
done
return 0
}
# Succeed (return 0) when both arguments resolve to the same physical
# path. If neither path exists they are considered different (return 1).
# Exits the current (sub)shell if readlink fails.
same_path() {
    if [[ -e "$1" || -e "$2" ]] ; then
        local resolved_a resolved_b
        resolved_a="$(readlink -f "$1")" || exit 1
        resolved_b="$(readlink -f "$2")" || exit 1
        [[ "$resolved_a" == "$resolved_b" ]]
    else
        return 1
    fi
}

View File

@ -14,7 +14,7 @@ fi
if [[ "$BUILD_USER" != "$(id -un)" ]] ; then
echo "This script can be used only by user $BUILD_USER!" >&2
exit 1
return 1
fi
BUILD_HOME="$(readlink -f "$(dirname "${BASH_SOURCE[0]}")")"