Multi-part build Jenkins pipelines for StarlingX

Signed-off-by: Davlet Panech <davlet.panech@windriver.com>
Davlet Panech 2022-05-12 09:34:20 -04:00
commit a70033aec7
74 changed files with 5275 additions and 0 deletions

6
lib/build_utils.sh Normal file

@ -0,0 +1,6 @@
# bash
stx_docker_cmd() {
echo '[[ -f ~/buildrc ]] && source ~/buildrc || : ; [[ -f ~/localrc ]] && source ~/localrc || : ; ' "$1" | stx control enter
}
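# A minimal usage sketch (the build-pkgs invocation below is illustrative,
# not part of this library); anything exported by ~/buildrc or ~/localrc
# is visible to the command inside the builder container:
#
#   source lib/build_utils.sh
#   stx_docker_cmd "build-pkgs --clean"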

359
lib/changelog_utils.sh Normal file

@ -0,0 +1,359 @@
# bash
source $(dirname "${BASH_SOURCE[0]}")/utils.sh || exit 1
source $(dirname "${BASH_SOURCE[0]}")/glob_utils.sh || exit 1
# used by need_build() below
NEED_BUILD_PATTERNS=(
"! cgcs-root/stx/docs/*"
"! cgcs-root/stx/test/*"
"! cgcs-root/local-build-data/*"
"! cgcs-root/wrs/docs/*"
" cgcs-root/wrs/titanium-tools/docker-images/*" # used by *_SDK_Build
" cgcs-root/wrs/titanium-tools/lab/*"
"! cgcs-root/wrs/titanium-tools/*"
# " stx-tools/centos-mirror-tools/yum.conf.sample"
# " stx-tools/centos-mirror-tools/config/centos/$LAYER"
# "! stx-tools/*"
" cgcs-root/build-tools/build_iso/*.cfg"
" cgcs-root/build-tools/build_iso/minimal_rpm_list.txt"
" cgcs-root/build-tools/signing/*"
" cgcs-root/build-tools/certificats/*"
" cgcs-root/build-tools/build-docker-images/*"
" cgcs-root/build-tools/build-wheels/*"
"! cgcs-root/build-tools/*"
)
# Usage: ( cd $MY_REPO_ROOT_DIR && print_last_commits ; ) >LAST_COMMITS
print_last_commits() {
local dir padded_dir
for dir in ./.repo/manifests $( find . \( -path './.repo' -prune \) -o \( -xtype d -name .git -printf '%h\n' \) | sort ) ; do
pushd "$dir" >/dev/null || exit 1
padded_dir="$(printf "%-52s" "$dir")"
git log --pretty=tformat:"$padded_dir %H" -n 1 || exit 1
popd >/dev/null || exit 1
done
}
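# Each output line pairs a git directory (padded to 52 columns) with its
# HEAD SHA; the SHA below is illustrative:
#
#   ./cgcs-root/stx/config                               1f2e3d4c5b6a...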
#
# Usage: diff_last_commits MY_WORKSPACE/../LAST_COMMITS [PATTERNS...]
#
# Diff git HEADs with a LAST_COMMITS file generated by another build
# Return true (0) if there are no differences
#
# PATTERNS may be used to filter out changes in some files when comparing
#
# If any file changed since LAST_COMMITs matches a PATTERN, we return
# false (ie changes detected & a rebuild is required). If it matches a
# "negative" pattern that begins with "!", we continue to the next file
# instead. Patterns are matched in order until a match is found. A
# combination of positive and negative patterns may be used to skip parts
# of the source tree.
#
# Patterns are similar to shell glob patterns, except "*" and "?" match
# any character including "/".
# Leading and trailing whitespace as well as leading "./" in patterns
# are not significant.
# Changes in files that didn't match any patterns are treated as positive
# matches.
#
# EXAMPLE:
#
#   diff_last_commits $MY_WORKSPACE/../LAST_COMMITS \
#       # detect changes in this file \
#       "stx-tools/centos-mirror-tools/yum.conf.sample" \
#       # ignore other changes under centos-mirror-tools \
#       "!stx-tools/centos-mirror-tools/*" \
#       # detect changes everywhere else (implied)
#       "*"
#
diff_last_commits() {
local last_commits_file="$1" ; shift || :
local debug=2
local dir dir_regex
local last_commit
local commit_files_str
local -a commit_files
local file
local match pattern_expr pattern regex
# no previous builds: assume builds different
if [[ ! -f "$last_commits_file" ]] ; then
[[ "$debug" == 0 ]] || echo "## file \`$last_commits_file' doesn't exist: return false" >&2
return 1
fi
# find all gits
[[ "$debug" == 0 ]] || echo "## looking for diffs between \`$PWD' and \`$last_commits_file'" >&2
for dir in ./.repo/manifests $( find . \( -path './.repo' -prune \) -o \( -xtype d -name .git -printf '%h\n' \) | sort ) ; do
[[ "$debug" == 0 ]] || echo "## checking \`$dir'" >&2
if [[ "$dir" == "." ]] ; then
dir_prefix=""
else
dir_prefix="${dir#./}"/
fi
# find last commit for this dir
# create a regex LAST_COMMITS, eg: ./cgcs-root/stx/config => ^[.][/]cgcs-root[/]stx[/]config[^a-zA-Z0-9/_-]
dir_regex="$(echo "$dir" | sed \
-e 's:/:[/]:g' \
-e 's:$:[^a-zA-Z0-9/_-]:' \
-e 's:^[.][.]:^[.][.]:' \
-e 's:^[.]:^[.]:' \
)"
last_commit=$(grep "$dir_regex" "$last_commits_file" | awk ' { print $2 } ')
# it didn't exist in previous build: assume builds different
if [[ -z "$last_commit" ]] ; then
[[ "$debug" == 0 ]] || echo "## $dir: not present in \`$last_commits_file': return false" >&2
return 1
fi
# get all files changed since last_commit
commit_files_str="$(cd "$dir" && git diff-tree --no-commit-id --name-only -r $last_commit..HEAD)" || exit 1
readarray -t commit_files < <(echo -n "$commit_files_str")
# check each file against PATTERNs
for file in "${commit_files[@]}" ; do
match=0
for pattern_expr in "$@" ; do
# convert glob pattern to regex
pattern="$(echo "$pattern_expr" | sed -r -e 's/^\s*[!]?\s*//' -e 's/\s*$//')"
regex="$(glob_to_basic_regex "$pattern")"
[[ "$debug" -lt 2 ]] || echo "## trying to match pattern \`$pattern' / regex \`$regex'" >&2
# check if the file matches
if echo "${dir_prefix}$file" | grep -q -E "$regex" >/dev/null || \
echo "$dir/$file" | grep -q -E "$regex" >/dev/null ; then
# pattern doesn't begin with "!": assume builds different
if ! echo "$pattern_expr" | grep -q -E '^\s*[!]' ; then
[[ "$debug" == 0 ]] || echo "## file \`${dir_prefix}$file' matched positive pattern \`$pattern_expr': return false" >&2
return 1
fi
# "!" pattern: continue to next file
[[ "$debug" == 0 ]] || echo "## file \`${dir_prefix}$file' matched negative pattern \`$pattern_expr': continue to next file" >&2
match=1
break
fi
done # for pattern_expr ...
if [[ $match == 0 ]] ; then
[[ "$debug" == 0 ]] || echo "## file \`${dir_prefix}$file' didn't match any negative patterns: return false" >&2
return 1
fi
done # for file ...
done # for dir ...
[[ "$debug" == 0 ]] || echo "## no diffs found: return true" >&2
return 0
}
#
# Usage: print_changelog LAST_COMMITS_FILE [DEFAULT_FROM_TIMESTAMP]
#
# Print out the change log since LAST_COMMITS_FILE.
#
# DEFAULT_FROM_TIMESTAMP will be used for repos missing from LAST_COMMITS_FILE
# and must be a date or date/time in ISO format. Defaults to the value of
# BUILD_TIMESTAMP global variable minus 1 day at midnight, or yesterday's midnight.
#
# LAST_COMMITS_FILE need not exist.
#
print_changelog() {
local last_commits_file="$1"
local default_from_timestamp
if [[ -n "$2" ]] ; then
default_from_timestamp="$2"
else
local build_date
build_date="${BUILD_TIMESTAMP:0:10}"
[[ -n "$build_date" ]] || build_date=$(date '+%Y-%m-%d') || return 1
default_from_timestamp="$(date --date="$build_date - 1 day" '+%Y-%m-%d 00:00:00')" || return 1
fi
local dir
for dir in ./.repo/manifests $( find . \( -path './.repo' -prune \) -o \( -xtype d -name .git -printf '%h\n' \) | sort ) ; do (
set -e
padded_dir="$(printf "%-52s" "$dir")"
commit=
if [[ -f "$last_commits_file" ]] ; then
# create a regex LAST_COMMITS, eg: ./cgcs-root/stx/config => ^[.][/]cgcs-root[/]stx[/]config[^a-zA-Z0-9/_-]
regex="$(echo "$dir" | sed \
-e 's:/:[/]:g' \
-e 's:$:[^a-zA-Z0-9/_-]:' \
-e 's:^[.][.]:^[.][.]:' \
-e 's:^[.]:^[.]:' \
)"
commit=$(grep "$regex" "$last_commits_file" | awk ' { print $2 } ')
fi
if [[ -n "$commit" ]] ; then
git_log_args=("$commit..")
else
git_log_args=(--after "$default_from_timestamp")
fi
pushd "$dir" >/dev/null
git log --date=iso --pretty=tformat:"$padded_dir %H %cd%x09%cn%x09%s" "${git_log_args[@]}"
popd >/dev/null
) ; done
}
#
# Usage: print_changelog_since TIMESTAMP
#
print_changelog_since() {
print_changelog "" "$1"
}
# Usage: create_standard_changelogs
#
# Create changelog files in $MY_WORKSPACE:
#
# CHANGELOG
# changes since LAST_COMMITS left by most recent successful build,
# used for rebuild calculations
#
# CHANGELOG.OLD
# changes since midnight of previous day (24-48 hours)
#
# CHANGELOG.IMG_DEV
# changes since LAST_COMMITS left by most recent dev images build
#
# CHANGELOG.IMG_STABLE
# changes since LAST_COMMITS left by most recent stable images build
#
# LAST_COMMITS
# SHA's of each git's HEAD
#
create_standard_changelogs() {
require_env "MY_REPO_ROOT_DIR" "MY_WORKSPACE"
local deploy_dir="${DEPLOY_DIR:-$MY_WORKSPACE/..}"
local changelog_file="$MY_WORKSPACE/CHANGELOG"
local build_date
build_date="${BUILD_TIMESTAMP:0:10}"
[[ -n "$build_date" ]] || build_date=$(date '+%Y-%m-%d') || return 1
local default_from_timestamp
default_from_timestamp="$(date --date="$build_date - 1 day" '+%Y-%m-%d 00:00:00')" || return 1
# CHANGELOG
echo "## creating $changelog_file (since last iso build)"
(
set -e
cd "$MY_REPO_ROOT_DIR"
print_changelog "$deploy_dir/LAST_COMMITS" "$default_from_timestamp"
) >"$changelog_file" || exit 1
# CHANGELOG.OLD
echo "## creating $changelog_file.OLD (since yesterday midnight)"
(
set -e
cd "$MY_REPO_ROOT_DIR"
print_changelog "" "$default_from_timestamp"
) >"$changelog_file.OLD" || exit 1
# CHANGELOG.IMG_DEV
echo "## creating $changelog_file.IMG_DEV (since last dev images build)"
(
set -e
cd "$MY_REPO_ROOT_DIR"
print_changelog "$deploy_dir/LAST_COMMITS_IMG_DEV" "$default_from_timestamp"
) >"$changelog_file.IMG_DEV" || exit 1
# CHANGELOG.IMG_STABLE
echo "## creating $changelog_file.IMG_STABLE (since last stable images build)"
(
set -e
cd "$MY_REPO_ROOT_DIR"
print_changelog "$deploy_dir/LAST_COMMITS_IMG_STABLE" "$default_from_timestamp"
) >"$changelog_file.IMG_STABLE" || exit 1
# LAST_COMMITS
(
set -e
cd "$MY_REPO_ROOT_DIR"
print_last_commits
) >"$MY_WORKSPACE/LAST_COMMITS" || exit 1
echo "## LAST_COMMITS" >&2
cat "$MY_WORKSPACE/LAST_COMMITS" >&2
echo "## END LAST_COMMITS" >&2
echo "## CHANGELOG" >&2
cat "$changelog_file" >&2
echo "## END CHANGELOG" >&2
}
# Usage: need_build [BUILD_DATE]
#
# Return true if build is required. BUILD_DATE defaults
# to BUILD_TIMESTAMP global var, or today's date.
#
# This will create either a NEED_BUILD or NO_BUILD_REQUIRED file
# in MY_WORKSPACE.
#
# If any of these job parameters are set to true, this function returns true:
# FORCE_BUILD
# BUILD_DOCKER_IMAGES_DEV
# BUILD_DOCKER_IMAGES_STABLE
#
# These job parameters/env vars list the week days on which images
# should be built (or the build forced), eg:
#
# BUILD_DOCKER_IMAGES_DAYS_DEV=""
# BUILD_DOCKER_IMAGES_DAYS_STABLE="mon tue"
# FORCE_BUILD_DAYS="sat"
#
need_build() {
local build_date build_weekday build_reason
build_date="$1"
[[ -n "$build_date" ]] || build_date="${BUILD_TIMESTAMP:0:10}"
[[ -n "$build_date" ]] || build_date=$(date '+%Y-%m-%d') || return 1
build_weekday=$(get_weekday "$build_date") || exit 1
local deploy_dir="${DEPLOY_DIR:-$MY_WORKSPACE/..}"
require_env MY_WORKSPACE MY_REPO_ROOT_DIR FORCE_BUILD BUILD_DOCKER_IMAGES_DEV BUILD_DOCKER_IMAGES_STABLE
rm -f "$MY_WORKSPACE/NO_BUILD_REQUIRED" "$MY_WORKSPACE/NEED_BUILD" || exit 1
if $FORCE_BUILD ; then
build_reason="forced"
elif in_list "$build_weekday" $(normalize_weekdays $FORCE_BUILD_DAYS) ; then
build_reason="forced:schedule"
elif $BUILD_DOCKER_IMAGES_DEV ; then
build_reason="dev_images_forced"
elif $BUILD_DOCKER_IMAGES_STABLE ; then
build_reason="stable_images_forced"
elif ! diff_last_commits "$deploy_dir/LAST_COMMITS" "${NEED_BUILD_PATTERNS[@]}" ; then
build_reason="changes_detected"
elif in_list "$build_weekday" $(normalize_weekdays $BUILD_DOCKER_IMAGES_DAYS_DEV) && \
! diff_last_commits "$deploy_dir/LAST_COMMITS_IMG_DEV" "${NEED_BUILD_PATTERNS[@]}" ; then
build_reason="dev_images_changes_detected"
elif in_list "$build_weekday" $(normalize_weekdays $BUILD_DOCKER_IMAGES_DAYS_STABLE) && \
! diff_last_commits "$deploy_dir/LAST_COMMITS_IMG_STABLE" "${NEED_BUILD_PATTERNS[@]}" ; then
build_reason="stable_images_changes_detected"
else
touch "$MY_WORKSPACE/NO_BUILD_REQUIRED" || exit 1
echo "## No new content. Build not required."
return 1
fi
echo "REASON=$build_reason" >"$MY_WORKSPACE/NEED_BUILD" || exit 1
echo "## Build required ($build_reason)"
return 0
}
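# Illustrative invocation (all paths and values are examples only):
#
#   export MY_REPO_ROOT_DIR=/localdisk/designer/jenkins/stx
#   export MY_WORKSPACE=/localdisk/loadbuild/jenkins/stx/20220512T000000Z
#   export FORCE_BUILD=false
#   export BUILD_DOCKER_IMAGES_DEV=false BUILD_DOCKER_IMAGES_STABLE=false
#   export BUILD_DOCKER_IMAGES_DAYS_STABLE="mon thu"
#   cd "$MY_REPO_ROOT_DIR" && need_build || exit 0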
#
# Return true if the build is being forced:
# FORCE_BUILD is "true" -- OR
# FORCE_BUILD_DAYS matches the build time stamp
#
is_build_forced() {
[[ -f "$MY_WORKSPACE/NEED_BUILD" ]] || return 1
grep -E -q "^\\s*REASON=['\"]?forced:?" "$MY_WORKSPACE/NEED_BUILD" >/dev/null
}

141
lib/file_utils.sh Normal file

@ -0,0 +1,141 @@
#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
FD_SHA=0
FD_NAME=1
FD_INODE=2
FD_PATH=3
fu_debug () {
>&2 echo "DEBUG: ${1}"
}
fu_error () {
>&2 echo "ERROR: ${1}"
}
get_file_data_from_path () {
local path="${1}"
local sha=""
sha="$(sha256sum "${path}" | cut -d ' ' -f 1; return ${PIPESTATUS[0]})"
if [ $? -ne 0 ]; then
return 1
fi
echo "$sha $(basename ${path}) $(stat --format=%i ${path}) ${path}"
}
get_file_data_from_dir () {
local directory="${1}"
local list_file="${2}"
local d
local line
local fields
for d in $(find $directory -type d | grep -v 'repodata'); do
sha256sum $d/*.deb $d/*.rpm $d/*.tar $d/*.tgz $d/*.gz $d/*.bz2 $d/*.xz 2> /dev/null | \
while read line; do
fields=( $(echo $line) )
echo "${fields[0]} $(basename ${fields[1]}) $(stat --format=%i ${fields[1]}) ${fields[1]}"
done
done > ${list_file}.unsorted
sort ${list_file}.unsorted > ${list_file}
\rm -f ${list_file}.unsorted
}
is_merge_candidate () {
local array1=( ${1} )
local array2=( ${2} )
fu_debug "is_merge_candidate ${1}"
fu_debug " vs ${2}"
if [ "${array1[$FD_SHA]}" != "${array2[$FD_SHA]}" ]; then
fu_debug "shas differ"
return 1
elif [ "${array1[$FD_NAME]}" != "${array2[$FD_NAME]}" ]; then
fu_debug "names differ"
return 1
elif [ "${array1[$FD_INODE]}" = "${array2[$FD_INODE]}" ]; then
fu_debug "inodes already the same"
return 1
elif [ "${array1[$FD_FPATH]}" = "${array2[$FD_PATH]}" ]; then
fu_debug "paths already the same"
return 1
fi
fu_debug "merge candidates:"
fu_debug " ${array1[$FD_PATH]}"
fu_debug " ${array2[$FD_PATH]}"
return 0
}
cp_or_link () {
local src_file="${1}"
local dest_dir="${2}"
shift 2
local lst_files=( "${@}" )
local lst_file
local src_name
local lnk_line
local src_line
local lnk_array=()
local src_array=()
if [ ! -d "${dest_dir}" ]; then
fu_error "destination directory '${dest_dir}' not found"
return 1
fi
src_name=$(basename ${src_file})
src_line="$(get_file_data_from_path "${src_file}")" || return 1
src_array=( ${src_line} )
if [ -f "${dest_dir}/${src_name}" ]; then
lnk_line="$(get_file_data_from_path "${dest_dir}/${src_name}")" || return 1
lnk_array=( ${lnk_line} )
# echo "src_line=${src_line}"
# echo "lnk_line=${lnk_line}"
if [ "${lnk_array[$FD_SHA]}" == "${src_array[$FD_SHA]}" ]; then
echo "Already have ${src_name}"
return 0
fi
fu_error "destination file '${dest_dir}/${src_name}' already exists"
return 1
fi
for lst_file in "${lst_files[@]}"; do
fu_debug "grep '${src_name}' in '${lst_file}'"
grep "${src_name}" "${lst_file}" | \
while read lnk_line; do
if is_merge_candidate "$lnk_line" "$src_line" ; then
lnk_array=( ${lnk_line} )
fu_debug "ln ${lnk_array[$FD_PATH]} ${dest_dir}/${src_name}"
\ln ${lnk_array[$FD_PATH]} ${dest_dir}/${src_name}
if [ $? -ne 0 ]; then
fu_error "ln ${lnk_array[$FD_PATH]} ${dest_dir}/${src_name}"
# leave the pipeline subshell with status 0: keep scanning the
# remaining list files, or fall through to the cp below
return 0
fi
# leave the pipeline subshell with status 1: the "|| return 0"
# below turns this into overall success (file was hard-linked)
return 1
fi
done || return 0
done
fu_debug "cp $src_file ${dest_dir}/"
\cp $src_file ${dest_dir}/
}
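# Illustrative call (all paths are examples): hard-link foo_1.0_all.deb
# into the publish directory when an identical file (same sha + name)
# appears in a previous build's list file, otherwise copy it:
#
#   get_file_data_from_dir /publish/20220511/outputs /tmp/20220511.lst
#   cp_or_link /build/foo_1.0_all.deb /publish/20220512/outputs /tmp/20220511.lst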
__make_deb_repo () {
local root_dir="${1}"
pushd "${root_dir}" || return 1
# FIXME: Release file not valid
dpkg-scanpackages . /dev/null > Release
dpkg-scanpackages . /dev/null | gzip -9c > Packages.gz
popd
}

155
lib/glob_utils.sh Normal file

@ -0,0 +1,155 @@
# bash
: ${_GLOB_UTILS_TEST:=0}
: ${_GLOB_UTILS_LOW_NOISE:=1}
#
# Convert a glob pattern to a basic (grep/sed) regex.
#
# This function doesn't treat "/" and "." specially.
#
glob_to_basic_regex() {
# disable "set -x" to reduce noise in jenkins jobs
if (
[[ "$_GLOB_UTILS_LOW_NOISE" == 1 ]] && \
shopt -po xtrace | grep -q -- -o >/dev/null
) 2>/dev/null
then
: "$FUNCNAME: disabling debug trace"
set +x
local restore_xtrace=1
else
local restore_xtrace=0
fi
local len="${#1}"
local i c c2
local range_start range_len
local neg_char
local res
for ((i=0; i<len; ++i)) ; do
c="${1:$i:1}"
# character range [...]
if [[ "$c" == '[' ]] ; then
# find end bracket
range_len=""
# check if its a negative range
# negative ranges start with "!" in glob and "^" in regex
let ++i
neg_char=
if [[ "${1:$i:1}" == '!' ]] ; then
let ++i
neg_char='^'
fi
# at this point i refers to the 1st char in range
# range can't be empty, so we need to skip
# this first char, then search for ']'
range_start=$i
for ((++i; i<len; ++i)) ; do
if [[ "${1:$i:1}" == ']' ]] ; then
let range_len=i-range_start
break
fi
done
# end bracket found: append the (possibly negative) range
if [[ -n "$range_len" ]] ; then
res+='['
res+="$neg_char"
res+="${1:$range_start:$range_len}"
res+=']'
let i=range_start+range_len
# end bracket not found: append a literal '[' (and '!' if one was
# consumed) and re-scan from the first in-range character
else
res+='\['
[[ -z "$neg_char" ]] || res+='[!]'
i=$((range_start-1))
fi
continue
fi
# Backslash is an escape char in glob, but not in basic regex,
# except when followed by a meta character: * { etc
# Surround next char with "[]"
if [[ "$c" == '\' ]] ; then
let ++i
# backslash at end of string: append a literal '\'
if [[ $i -ge $len ]] ; then
c2='\'
else
c2="${1:$i:1}"
fi
# we can't use this method with '[^]'
if [[ "$c2" != '^' ]] ; then
res+="[$c2]"
continue
fi
res+='\^'
continue
fi
# Escape ^ as \^ -- can't use square brackets
# because this is a negation character in ranges
if [[ "$c" == '^' ]] ; then
res+='\^'
continue
fi
# Escape these using square brackets:
# $. - have special meaning in regex
# /,!|#@ - these are not special, but are frequently
# used as separators in sed "s" command
if [[ "$c" == '$' || "$c" == '.' ||
"$c" == '/' || "$c" == ',' || "$c" == '!' ||
"$c" == '|' || "$c" == '#' || "$c" == '@' ]] ; then
res+="[$c]"
continue
fi
# "?" => "."
if [[ "$c" == '?' ]] ; then
res+='.'
continue
fi
# "*" => ".*"
if [[ "$c" == '*' ]] ; then
res+='.*'
continue
fi
# anything else: append as is
res+="$c"
done
echo "^${res}\$"
if [[ "$restore_xtrace" == 1 ]] ; then
set -x # debug output of this "set" is suppressed
set -x # execute it again, this time with "-x" already on
: "$FUNCNAME: restored debug trace"
fi
}
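# Example: the generated regex works with plain (basic-regex) grep; the
# pattern below is illustrative:
#
#   regex=$(glob_to_basic_regex 'stx-tools/*')   # => ^stx-tools[/].*$
#   echo "stx-tools/Makefile" | grep -q "$regex" && echo "match"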
# unit tests
if [[ "$_GLOB_UTILS_TEST" == 1 ]] ; then
expect() {
local glob="$1"
local expected="$2"
local actual
actual="$(glob_to_basic_regex "$1")"
if [[ "$actual" != "$expected" ]] ; then
echo "${BASH_SOURCE}:${BASH_LINENO}: glob_to_basic_regex '$glob': expected '$expected' actual '$actual'" >&2
exit 1
fi
}
expect 'a[0-9]b' '^a[0-9]b$'
expect 'a[!0-9]b' '^a[^0-9]b$'
expect 'a?b' '^a.b$'
expect 'a*b' '^a.*b$'
expect 'a\*b' '^a[*]b$'
expect 'a^b$c' '^a\^b[$]c$'
expect '/foo/*' '^[/]foo[/].*$'
expect 'a.b' '^a[.]b$'
expect 'a\[b' '^a[[]b$'
expect 'a[a-z]b[!A-Z]c[!0-9[]d!^' '^a[a-z]b[^A-Z]c[^0-9[]d[!]\^$'
expect 'abc\' '^abc[\]$'
expect 'a\b' '^a[b]$'
fi

761
lib/job_utils.sh Normal file

@ -0,0 +1,761 @@
: ${LOADBUILD_ROOT:="/localdisk/loadbuild"}
: ${DESIGNER_ROOT:="/localdisk/designer"}
source "${BASH_SOURCE[0]%/*}"/utils.sh || return 1
source "${BASH_SOURCE[0]%/*}"/log_utils.sh || return 1
# Top-level source directory of jenkins scripts repo
TOP_SCRIPTS_DIR=$(readlink -f "${BASH_SOURCE[0]%/*}"/..)
# Library scripts dir
LIB_DIR="$TOP_SCRIPTS_DIR/lib"
# Scripts dir
SCRIPTS_DIR="$TOP_SCRIPTS_DIR/scripts"
# When true produce less noise
#QUIET=false
# Python 3.x executable
: ${PYTHON3:=python3}
# docker images
SAFE_RSYNC_DOCKER_IMG="servercontainers/rsync:3.1.3"
COREUTILS_DOCKER_IMG="debian:bullseye-20220509"
APT_UTILS_DOCKER_IMG="debian:bullseye-20220509"
notice() {
( set +x ; print_log -i --loud "$@" ; )
}
info() {
( set +x ; print_log -i --prefix ">>> " "$@" ; )
}
error() {
( set +x ; print_log -i --loud --dump-stack --location --prefix "ERROR: " "$@" ; )
}
warn() {
( set +x; print_log -i --prefix "WARNING: " --location "$@" ; )
}
die() {
( set +x ; print_log -i --loud --dump-stack --location --prefix "ERROR: " "$@" ; )
exit 1
}
bail() {
( set +x ; print_log -i --prefix ">>> " "$@" ; )
exit 0
}
trim() {
echo "$@" | sed -r -e 's/^\s+//' -e 's/\s+$//'
}
maybe_run() {
local cmd
local sep=''
local arg
for arg in "$@" ; do
cmd+="$sep" ; sep=' '
cmd+="$(printf '%q' "$arg")"
done
if $DRY_RUN ; then
echo "running (dry run): $cmd"
else
echo "running: $cmd"
"$@"
fi
}
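# Illustrative use: DRY_RUN=true makes maybe_run print the quoted command
# without executing it:
#
#   DRY_RUN=true  maybe_run rm -rf "$WORKSPACE_ROOT/export"   # prints only
#   DRY_RUN=false maybe_run mkdir -p "$WORKSPACE_ROOT/export" # prints, then runs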
#
# Usage: declare_env NAME [DFLT]
#
# Make sure the specified env var is defined & non-empty,
# otherwise set it to a default value.
# Trim and export it in either case.
declare_env() {
local var="$1"
local dflt="$2"
# trim it
local val="$(trim "${!var}")"
# set to default
if [[ -z "$val" ]] ; then
val="$(trim "$dflt")"
declare -g -x "$var=$val"
return
fi
# export it
declare -g -x "$var"
}
#
# Usage: require_env NAME [DFLT]
#
# Same as declare_env, but fail & exit if the var is empty
require_env() {
local var="$1" ; shift || :
declare_env "$var" "$@"
[[ -n "${!var}" ]] || die "required variable \"$var\" is not set"
}
#
# Usage: require_file FILENAME
#
# Make sure file exists and is readable; die otherwise
#
require_file() {
: <"$1" || die "$1: couldn't open file file reading"
}
__set_common_vars() {
require_env BUILD_HOME
require_env TIMESTAMP
declare_env PUBLISH_TIMESTAMP "$TIMESTAMP"
declare_env DRY_RUN
# Set dry-run options
if [[ "$DRY_RUN" != "false" ]] ; then
DRY_RUN="true"
DRY_RUN_ARG="--dry-run"
else
DRY_RUN="false"
DRY_RUN_ARG=""
fi
export PATH="/usr/local/bin:$PATH"
}
__set_build_vars() {
# Set a few additional globals
REPO_ROOT_SUBDIR=localdisk/designer/$BUILD_USER/$PROJECT
WORKSPACE_ROOT_SUBDIR=localdisk/loadbuild/$BUILD_USER/$PROJECT
REPO_ROOT="$BUILD_HOME/repo"
WORKSPACE_ROOT="$BUILD_HOME/workspace"
USER_ID=$(id -u $BUILD_USER) || exit 1
BUILD_OUTPUT_HOME="$BUILD_OUTPUT_ROOT/$TIMESTAMP"
# publish vars
PUBLISH_DIR="${PUBLISH_ROOT}/${PUBLISH_TIMESTAMP}${PUBLISH_SUBDIR:+/$PUBLISH_SUBDIR}"
PUBLISH_URL="${PUBLISH_ROOT_URL}/${PUBLISH_TIMESTAMP}${PUBLISH_SUBDIR:+/$PUBLISH_SUBDIR}"
# parallel
if [[ -n "$PARALLEL_CMD" && "${PARALLEL_CMD_JOBS:-0}" -gt 0 ]] ; then
PARALLEL="$PARALLEL_CMD -j ${PARALLEL_CMD_JOBS}"
else
PARALLEL=
fi
}
__started_by_jenkins() {
[[ -n "$JENKINS_HOME" ]]
}
#
# Usage: load_build_config
#
# Source $BUILD_HOME/build.conf and set a few common globals
#
load_build_config() {
__set_common_vars || exit 1
source "$BUILD_HOME/build.conf" || exit 1
__set_build_vars || exit 1
}
#
# Usage: load_build_env
#
# Load $BUILD_HOME/build.conf and source stx tools env script
#
load_build_env() {
__set_common_vars || exit 1
require_file "$BUILD_HOME/build.conf" || exit 1
source "$BUILD_HOME/source_me.sh" || exit 1
__set_build_vars || exit 1
}
# Usage: stx_docker_cmd [--dry-run] SHELL_SNIPPET
stx_docker_cmd() {
local dry_run=0
if [[ "$1" == "--dry-run" ]] ; then
dry_run=1
shift
fi
if [[ "$QUIET" != "true" ]] ; then
echo ">>> running builder pod command:" >&2
echo "$1" | sed -r 's/^/\t/' >&2
fi
if [[ "$dry_run" -ne 1 ]] ; then
local -a args
if __started_by_jenkins ; then
args+=("--no-tty")
fi
stx -d shell "${args[@]}" -c "$1"
fi
}
# Usage: docker_login REGISTRY
# Login to docker in builder pod
docker_login() {
local reg="$1"
local login_arg
if [[ "$reg" != "docker.io" ]] ; then
login_arg="$reg"
fi
stx_docker_cmd "docker login $login_arg </dev/null"
}
#
# Usage: parse_docker_registry REGISTRY[/NAMESPACE]
#
# Parse a registry name and print the registry and the namespace
# separated by a space. Print an error and return non-zero
# if the registry string is invalid.
#
# Examples:
# parse_docker_registry foo # ERROR
# parse_docker_registry foo/bar # ERROR
# parse_docker_registry foo.com/bar///baz # foo.com bar/baz
#
parse_docker_registry() {
local spec="$1"
local registry namespace
# up to 1st slash
registry="$(echo "$spec" | sed 's!/.*!!' || :)"
# remove double-shashes & extract everything past the 1st slash
namespace="$(echo "$spec" | sed -e 's!//*!/!g' | sed -n -e 's!^[^/]*/\(.*\)!\1!p' || :)"
# registry must contain a dot or a colon to distinguish it from a local namespace
if ! { echo "$registry" | grep -q -E "[.:]" ; } ||
! { echo "$registry" | grep -q -E "^[a-zA-Z0-9._-]+(:[0-9]{1,5})?$" ; } ; then
error "invalid docker registry spec \"$spec\""
return 1
fi
echo $registry $namespace
}
__get_protected_dirs() {
[[ -n "$USER" ]] || die "USER not set"
[[ -n "$PROJECT" ]] || die "PROJECT not set"
echo "$DESIGNER_ROOT:ro"
echo "$LOADBUILD_ROOT:ro"
echo "$DESIGNER_ROOT/$USER/$PROJECT"
echo "$LOADBUILD_ROOT/$USER/$PROJECT"
}
#
# Usage: __ensure_dirs_within_protected_set PROTECTED_DIRS... -- DIRS...
# Make sure each DIR equals or starts with one of PROTECTED_DIRS
#
__ensure_dirs_within_protected_set() {
local -a protected_dirs
while [[ "$#" -gt 0 && "$1" != "--" ]] ; do
protected_dirs+=("$1")
dir="$1"
shift
done
shift || true
while [[ "$#" -gt 0 ]] ; do
local dir="$1" ; shift || true
if ! echo "$dir" | grep -q '^/' ; then
error -i "$dir: directories must be absolute"
return 1
fi
# check if $dir under any of $protected_dirs
local safe=0
local parent_dir
for protected_dir in "${protected_dirs[@]}" ; do
protected_dir="${protected_dir%%:*}"
if [[ "$dir" == "$protected_dir" || "${dir#$protected_dir/}" != "${dir}" ]] ; then
safe=1
break
fi
done
if [[ $safe != 1 ]] ; then
error -i "attempted to operate on an unsafe directory \"$dir\""
return 1
fi
done
}
#
# Usage: __ensure_dir_not_blacklisted_for_writing [--skip-missing] PATH...
#
__ensure_dir_not_blacklisted_for_writing() {
local -a blacklist_dir_list=(
"/"
)
local -a blacklist_prefix_list=(
"/usr/"
"/etc/"
"/var/"
"/run/"
"/proc/"
"/sys/"
"/boot/"
"/dev/"
"/media/"
"/mnt/"
"/proc/"
"/net/"
"/sys/"
)
local skip_missing=0
if [[ "$1" == "--skip-missing" ]] ; then
skip_missing=1
shift
fi
local dir
for dir in "$@" ; do
local abs_dir
if ! abs_dir="$(readlink -f "$dir")" ; then
if [[ $skip_missing -eq 1 ]] ; then
continue
fi
error -i "$dir: does not exist or is not readable"
return 1
fi
#if [[ ! -w "$abs_dir" ]] ; then
# error -i "$dir: not writable"
# return 1
#fi
if in_list "$abs_dir" "${blacklist_dir_list}" || \
starts_with "$abs_dir" "${blacklist_prefix_list}" ; then
error -i "$dir: is blacklisted for writing"
return 1
fi
done
}
#
# Usage: __safe_docker_run [--dry-run] PROTECTED_DIRS... -- <DOCKER RUN OPTIONS>
#
__safe_docker_run() {
local loc="${BASH_SOURCE[0]}(${BASH_LINENO[0]}): ${FUNCNAME[0]}: "
local dry_run=0
local dry_run_prefix
if [[ "$1" == "--dry-run" ]] ; then
dry_run=1
dry_run_prefix="(dry_run) "
shift || true
fi
# construct mount options
local -a mount_opts
while [[ "$#" -gt 0 && "$1" != "--" ]] ; do
local dir="$1" ; shift
local extra_mount_str=""
if echo "$dir" | grep -q : ; then
local opt
local -a extra_mount_opts
for opt in $(echo "$dir" | sed -e 's/.*://' -e 's/,/ /g') ; do
if [[ "$opt" == "ro" ]] ; then
extra_mount_str+=",ro"
continue
fi
error -i "invalid mount option \"$opt\""
return 1
done
dir="${dir%%:*}"
fi
mount_opts+=("--mount" "type=bind,src=$dir,dst=$dir""$extra_mount_str")
done
shift || true
if [[ "$QUIET" != "true" ]] ; then
echo ">>> ${dry_run_prefix}running: docker run ${mount_opts[@]} $@" >&2
fi
if [[ $dry_run -ne 1 ]] ; then
local docker_opts=("-i")
if [[ -t 0 ]] ; then
docker_opts+=("-t")
fi
docker run "${docker_opts[@]}" "${mount_opts[@]}" "$@"
fi
}
#
# Usage: safe_docker_run <DOCKER RUN OPTIONS>
# Run a docker container with safe/protected dirs mounted
#
safe_docker_run() {
local -a protected_dirs
local protected_dirs_str
protected_dirs_str="$(__get_protected_dirs)" || return 1
readarray -t protected_dirs <<<"$(echo -n "$protected_dirs_str")" || return 1
__safe_docker_run "${protected_dirs[@]}" -- "$@"
}
#
# Usage:
# safe_copy_dir [--exclude PATTERN ...]
# [--include PATTERN ...]
# [--delete]
# [--chown USER:GROUP]
# [--dry-run]
# [-v | --verbose]
# SRC_DIR... DST_DIR
#
safe_copy_dir() {
local usage_msg="
Usage: ${FUNCNAME[0]} [OPTIONS...] SRC_DIR... DST_DIR
"
# get protected dirs
local -a protected_dirs
local protected_dirs_str
protected_dirs_str="$(__get_protected_dirs)" || return 1
readarray -t protected_dirs <<<"$(echo -n "$protected_dirs_str")" || return 1
# parse command line
local opts
local -a rsync_opts
local dry_run_arg=
opts=$(getopt -n "${FUNCNAME[0]}" -o "v" -l exclude:,include:,delete,chown:,dry-run,verbose -- "$@")
[[ $? -eq 0 ]] || return 1
eval set -- "${opts}"
while true ; do
case "$1" in
--exclude)
rsync_opts+=("--exclude" "$2")
shift 2
;;
--include)
rsync_opts+=("--include" "$2")
shift 2
;;
--delete)
rsync_opts+=("--delete-after")
shift
;;
--dry-run)
dry_run_arg="--dry-run"
shift
;;
--chown)
rsync_opts+=("--chown" "$2")
shift 2
;;
-v | --verbose)
rsync_opts+=("--verbose")
shift
;;
--)
shift
break
;;
-*)
error --epilog="$usage_msg" "invalid options"
return 1
;;
*)
break
;;
esac
done
if [[ "$#" -lt 2 ]] ; then
error --epilog="$usage_msg" "invalid options"
return 1
fi
local dst_dir="${@:$#:1}"
# make sure dirs start with a known prefix
__ensure_dirs_within_protected_set "${protected_dirs[@]}" -- "$@" || return 1
# make sure last destination dir is writeable
__ensure_dir_not_blacklisted_for_writing "${dst_dir}"
# run rsync in docker
rsync_opts+=(--archive --devices --specials --hard-links --recursive --one-file-system)
if ! __safe_docker_run $dry_run_arg "${protected_dirs[@]}" -- --rm "$SAFE_RSYNC_DOCKER_IMG" rsync "${rsync_opts[@]}" "$@" ; then
error "failed to copy files"
return 1
fi
}
#
# Usage: safe_rm [OPTIONS...] PATHS
#
safe_rm() {
local usage_msg="
Usage: ${FUNCNAME[0]} [OPTIONS...] PATHS...
--dry-run
-v,--verbose
"
# get protected dirs
local -a protected_dirs
local protected_dirs_str
protected_dirs_str="$(__get_protected_dirs)" || return 1
readarray -t protected_dirs <<<"$(echo -n "$protected_dirs_str")" || return 1
# parse command line
local opts
local -a rm_opts
local -a rm_cmd=("rm")
opts=$(getopt -n "${FUNCNAME[0]}" -o "v" -l dry-run,verbose -- "$@")
[[ $? -eq 0 ]] || return 1
eval set -- "${opts}"
while true ; do
case "$1" in
--dry-run)
rm_cmd=("echo" "(dry run)" "rm")
shift
;;
-v | --verbose)
rm_opts+=("--verbose")
shift
;;
--)
shift
break
;;
-*)
error --epilog="$usage_msg" "invalid options"
return 1
;;
*)
break
;;
esac
done
if [[ "$#" -lt 1 ]] ; then
error --epilog="$usage_msg" "invalid options"
return 1
fi
__ensure_dirs_within_protected_set "${protected_dirs[@]}" -- "$@" || return 1
__ensure_dir_not_blacklisted_for_writing --skip-missing "$@"
# run rm in docker
rm_opts+=(--one-file-system --preserve-root --recursive --force)
info "removing $*"
if ! __safe_docker_run "${protected_dirs[@]}" -- --rm "$COREUTILS_DOCKER_IMG" "${rm_cmd[@]}" "${rm_opts[@]}" -- "$@" ; then
error "failed to remove files"
return 1
fi
}
#
# Usage: safe_chown OPTIONS USER[:GROUP] PATHS...
safe_chown() {
local usage_msg="
Usage: ${FUNCNAME[0]} [OPTIONS...] USER[:GROUP] PATHS...
--dry-run
-v,--verbose
-R,--recursive
"
# get protected dirs
local -a protected_dirs
local protected_dirs_str
protected_dirs_str="$(__get_protected_dirs)" || return 1
readarray -t protected_dirs <<<"$(echo -n "$protected_dirs_str")" || return 1
# parse command line
local cmd_args
local dry_run_arg
local -a cmd=("chown")
opts=$(getopt -n "${FUNCNAME[0]}" -o "vR" -l dry-run,verbose,recursive -- "$@")
[[ $? -eq 0 ]] || return 1
eval set -- "${opts}"
while true ; do
case "$1" in
--dry-run)
dry_run_arg="--dry-run"
shift
;;
-v | --verbose)
cmd_args+=("--verbose")
shift
;;
-R | --recursive)
cmd_args+=("--recursive")
shift
;;
--)
shift
break
;;
-*)
error --epilog="$usage_msg" "invalid options"
return 1
;;
*)
break
;;
esac
done
if [[ "$#" -lt 2 ]] ; then
error --epilog="$usage_msg" "invalid options"
return 1
fi
local user_group="$1" ; shift
__ensure_dirs_within_protected_set "${protected_dirs[@]}" -- "$@" || return 1
__ensure_dir_not_blacklisted_for_writing --skip-missing "$@"
# resolve USER:GROUP to UID:GID
local uid_gid
uid_gid=$(
gid_suffix=
user="${user_group%%:*}"
if echo "$user_group" | grep -q ":" ; then
group="${user_group#*:}"
if [[ -n "$group" ]] ; then
gid=$(getent "$group" | awk -F ':' '{print $3}')
[[ -n "$gid" ]] || exit 1
fi
gid=$(id -g $user) || exit 1
gid_suffix=":$gid"
fi
uid=$(id -u $user) || exit 1
echo "${uid}${gid_suffix}"
) || {
error "unable to resolve owner $user_group"
return 1
}
if ! __safe_docker_run $dry_run_arg "${protected_dirs[@]}" -- --rm "$COREUTILS_DOCKER_IMG" \
"${cmd[@]}" "${cmd_args[@]}" -- "$uid_gid" "$@" ; then
error "failed to change file ownership"
return 1
fi
}
# Usage: make_deb_repo [--origin=ORIGIN] [--label=LABEL] DIR
make_deb_repo() {
local origin
local label
while [[ "$#" -gt 0 ]] ; do
case "$1" in
--origin=*)
origin="${1#--origin=}"
shift
;;
--label=*)
label="${1#--label=}"
shift
;;
*)
break
;;
esac
done
local dir="$1"
(
set -e
cd "$dir"
rm -f Packages Packages.gz
(
set -e
dpkg-scanpackages -t deb --multiversion .
dpkg-scanpackages -t udeb --multiversion .
) >Packages
gzip -c Packages >Packages.gz
__print_deb_release "$origin" "$label" >Release.tmp
mv -f Release.tmp Release
rm -f Packages
)
}
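# Illustrative call (origin, label and path are examples):
#
#   make_deb_repo --origin=starlingx --label=nightly "$PUBLISH_DIR/outputs/std/packages"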
__print_deb_release_checksums() {
local section="$1"
local checksum_prog="$2"
local body
local files="Packages"
body="$(
set -e
for base in Packages ; do
for file in "$base" "${base}.gz" "${base}.xz" "${base}.bz2" ; do
if [[ -f "$file" ]] ; then
checksum=$($checksum_prog "$file" | awk '{print $1}' ; check_pipe_status) || exit 1
size=$(stat --format '%s' "$file") || exit 1
printf ' %s %16d %s\n' "$checksum" "$size" "$file"
fi
done
done
)" || return 1
if [[ -n "$body" ]] ; then
echo "${section}:"
echo "${body}"
fi
}
__print_deb_release() {
local origin="$1"
local label="$2"
local now
# Date: ...
now="$(date --rfc-2822 --utc)" || return 1
echo "Date: $now"
# Origin: ...
if [[ -n "$origin" ]] ; then
echo "Origin: $origin"
fi
# Label: ...
if [[ -n "$label" ]] ; then
echo "Label: $label"
fi
# <checksums>
__print_deb_release_checksums "MD5Sum" "md5sum" || return 1
__print_deb_release_checksums "SHA1" "sha1sum" || return 1
__print_deb_release_checksums "SHA256" "sha256sum" || return 1
__print_deb_release_checksums "SHA512" "sha512sum" || return 1
}
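# Sample Release output (checksums, sizes and date are illustrative, and
# the MD5Sum/SHA1/SHA512 sections are elided):
#
#   Date: Thu, 12 May 2022 13:34:20 +0000
#   Origin: starlingx
#   Label: nightly
#   SHA256:
#    9f86d081884c7d65...             2048 Packages
#    5e884898da280471...              512 Packages.gz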
#gen_deb_repo_meta_data() {
# local dry_run=0
# local dry_run_cmd
# if [[ "$1" == "--dry-run" ]] ; then
# dry_run=1 ; shift || true
# dry_run_cmd="echo >>> (dry run): "
# fi
# local dir="$1"
# __ensure_dir_not_blacklisted_for_writing "$dir"
# $dry_run_cmd cp \
# "$SCRIPTS_DIR/helpers/create-deb-meta-priv.sh" \
# "$SCRIPTS_DIR/helpers/create-deb-meta.sh" \
# "$dir/"
# $dry_run_cmd chmod +x "$dir/create-deb-meta-priv.sh" "$dir/create-deb-meta-priv.sh"
# local now
# now="$(date -R)" || return 1
# local -a docker_run=(
# __safe_docker_run
# --
# --rm
# --mount "type=bind,src=$dir,dst=$dir"
# -w "$dir"
# -e "__REALLY_RUN_ME=1"
# -e "NOW=$now"
# "$APT_UTILS_DOCKER_IMG"
# /bin/bash
# -c
# )
# local rv=0
# $dry_run_cmd "${docker_run[@]}" "./create-deb-meta-priv.sh $(id -u) $(id -g)" || rv=1
# $dry_run_cmd rm -f "$dir/create-deb-meta-priv.sh"
# $dry_run_cmd rm -f "$dir/create-deb-meta.sh"
# $dry_run_cmd rm -rf "$dir/cache"
# if [[ $rv -ne 0 ]] ; then
# error "failed to generate meta data in $dir"
# return 1
# fi
# return 0
#}

153
lib/log_utils.sh Normal file

@ -0,0 +1,153 @@
# bash
if [[ -z "$__LOG_UTILS_INCLUDED" ]] ; then
__LOG_UTILS_INCLUDED=1
#
# Usage: dump_stack [FRAME_OFFSET]
#
dump_stack() {
local -i index
local -i max_index="${#BASH_SOURCE[@]}"
local -a line_numbers=($LINENO "${BASH_LINENO[@]}")
for ((index=${1:-0}; index < max_index; ++index)) {
echo " at ${BASH_SOURCE[$index]} line ${line_numbers[$index]}"
}
}
#
# Usage: print_log [OPTIONS...] LINES...
#
# Print a log message, one output line per LINE
#
# --dump-stack include stack trace in output
# --frame-offset=N frame offset for stack trace (default: 0)
# --prefix=PREFIX include PREFIX in front of each LINE
# --location include caller function name in front of each LINE
# --epilog=EPILOG include EPILOG in output
# --loud be loud
# -i,--increment-frame-offset
# add one to frame-offset (additive)
#
__print_log_usage() {
local func="${FUNCNAME[1]}"
echo "
################################################################################
ERROR: ${func}: invalid syntax
$(dump_stack 2)
Usage: $func [OPTIONS...] LINES...
See ${BASH_SOURCE[0]} near line ${LINENO} for more info.
################################################################################
"
}
print_log() {
(
set +x
local -i frame_offset=1
local -i frame_offset_offset=0
local -i dump_stack_frame_offset
local -i dump_stack=0
local line_prefix
local epilog
local -i include_location=0
local loud_prefix loud_suffix
local loud_line_prefix
# parse command line
local opts
opts="$(\
getopt -n "${FUNCNAME[0]}" -o "+i" \
-l dump-stack \
-l frame-offset: \
-l increment-frame-offset \
-l prefix: \
-l location\
-l epilog: \
-l loud \
-- "$@"
)"
if [[ $? -ne 0 ]] ; then
__print_log_usage
exit 1
fi
eval set -- "${opts}"
while true ; do
case "$1" in
--dump-stack)
dump_stack=1
shift
;;
--frame-offset)
frame_offset="$2"
shift 2
;;
-i | --increment-frame-offset)
let ++frame_offset_offset
shift
;;
--prefix)
line_prefix="$2"
shift 2
;;
--location)
include_location=1
shift
;;
--epilog)
epilog="$2"
shift 2
;;
--loud)
local nl=$'\n'
loud_line_prefix=$'### '
loud_prefix="${nl}${nl}${loud_line_prefix}${nl}"
loud_suffix="${loud_line_prefix}${nl}${nl}"
shift
;;
--)
shift
break
;;
-*)
__print_log_usage
exit 1
;;
*)
break
;;
esac
done
if [[ "$#" -lt 1 ]] ; then
__print_log_usage
exit 1
fi
let frame_offset+=frame_offset_offset
local location
if [[ $include_location -eq 1 ]] ; then
local -i funcname_index=$frame_offset
location="${FUNCNAME[$funcname_index]}: "
fi
echo -n "$loud_prefix"
while [[ "$#" -gt 0 ]] ; do
local line="$1" ; shift || true
echo "${location}${loud_line_prefix}${line_prefix}${line}"
done
if [[ $dump_stack -eq 1 ]] ; then
let dump_stack_frame_offset=frame_offset+1
dump_stack $dump_stack_frame_offset
fi
if [[ -n "$epilog" ]] ; then
echo -n "$epilog"
fi
echo -n "$loud_suffix"
) >&2
}
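# Illustrative calls:
#
#   print_log --prefix ">>> " "build started"
#   print_log --loud --location --dump-stack "docker login failed"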
fi # include guard

106
lib/publish_utils.sh Normal file

@ -0,0 +1,106 @@
CHECKSUMS_FILENAME="StxChecksums"
publish_file() {
local filename="$1"
local dst_dir="$2"
local published_checksum_files_list_file="$3"
mkdir -p "$dst_dir" || exit 1
local basename
basename="${filename##*/}"
local dst_file
dst_file="$dst_dir/$basename"
local checksum
checksum=$(sha256sum "$filename" | awk '{print $1}' ; check_pipe_status) || exit 1
# find an existing file in $combined_checksums_file that we can
# hardlink to
local link_created
link_created=$(
cat "$published_checksum_files_list_file" | {
while read checksum_file ; do
local checksum_dir
checksum_dir="${checksum_file%/*}"
link_created=$(
\grep "^$checksum " "$checksum_file" \
| while read x_checksum x_basename x_size x_mtime x_device x_inode x_path ; do
x_filename="$checksum_dir/$x_basename"
if [[ ! -f "$x_filename" || -z "$x_basename" || -z "$x_size" || -z "$x_mtime" || -z "$x_device" || -z "$x_inode" || -z "$x_path" ]] ; then
continue
fi
x_recheck_stat=$(stat --printf '%s %Y %d %i' "$x_filename") || continue
#echo ">>> $x_recheck_stat $x_size $x_mtime $x_device $x_inode" >&2
[[ "$x_recheck_stat" == "$x_size $x_mtime $x_device $x_inode" ]] || continue
# try to link it
if \ln -f "$x_filename" "$dst_file" 2>&1 ; then
echo "LINK $dst_file" >&2
echo "link_created"
fi
cat >/dev/null # read and discard remaining lines to avoid SIGPIPE
exit 0
done
) || exit 1
if [[ "$link_created" == "link_created" ]] ; then
echo "link_created"
cat >/dev/null # read and discard remaining lines to avoid SIGPIPE
exit 0
fi
done
}
check_pipe_status || exit 1
)
check_pipe_status || exit 1
# try to link source file to destination
if [[ "$link_created" != "link_created" ]] ; then
link_created=$(
if \ln -f "$filename" "$dst_file" 2>&1 ; then
echo "LINK $dst_file" >&2
echo "link_created"
fi
) || exit 1
fi
# if all else fails, copy it
if [[ "$link_created" != "link_created" ]] ; then
\cp -f --preserve=mode,timestamps,xattr "$filename" "$dst_file" || exit 1
echo "COPY $dst_file" >&2
fi
# output published file info + source path
local -a stat
local size device inode mtime
stat=($(stat --printf '%s %Y %d %i' "$dst_file")) || exit 1
size="${stat[0]}"
mtime="${stat[1]}"
device="${stat[2]}"
inode="${stat[3]}"
echo "$checksum $basename $size $mtime $device $inode $filename"
}
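# Each published file yields one StxChecksums record with these fields
# (field names illustrative of the read order above):
#
#   <sha256> <basename> <size> <mtime> <device> <inode> <source-path>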
find_publish_dirs() {
find "$PUBLISH_ROOT" -mindepth 1 -maxdepth 1 \
-type d \
-name '[0-9][0-9][0-9][0-9]*' \
-not -name "$PUBLISH_TIMESTAMP"
}
find_checksum_files() {
find_publish_dirs | while read dir ; do
for subdir in "$@" ; do
if [[ -d "$dir/$subdir" ]] ; then
find "$dir/$subdir" -type f -name "$CHECKSUMS_FILENAME"
fi
done
done
check_pipe_status || exit 1
}

59
lib/retries.sh Normal file

@ -0,0 +1,59 @@
#!/bin/bash
#
# Copyright (c) 2020 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
#
# Utilities to retry commands
#
function with_retries {
local max_attempts=$1
local delay=$2
local cmd=$3
# Pop the first two arguments off the list,
# so we can pass additional args to the command safely
shift 3
local -i attempt=0
while :; do
attempt=$((attempt+1))
>&2 echo "Running: ${cmd} $@"
# ok, this is an obscure one ...
# ${cmd}
# ... alone risks tripping bash's 'set -e',
# so I need to hide the error code using a pipe
# with a final command that returns true.
# original implementation was ...
# ${cmd} "$@" | true
# ... but this sometimes yields a ${PIPESTATUS[0]} of 141
# if ${cmd} is still writing to stdout when 'true' exits.
# Instead I use 'tee' to consume everything ${cmd} sends to stdout.
${cmd} "$@" | tee /dev/null
if [ ${PIPESTATUS[0]} -eq 0 ]; then
return 0
fi
>&2 echo "Command (${cmd}) failed, attempt ${attempt} of ${max_attempts}."
if [ ${attempt} -lt ${max_attempts} ]; then
>&2 echo "Waiting ${delay} seconds before retrying..."
sleep ${delay}
continue
else
>&2 echo "Max command attempts reached. Aborting..."
return 1
fi
done
}
function with_default_retries {
local cmd=$1
shift 1
with_retries ${RETRIES:-1} ${RETRY_INTERVAL_SEC:-1} "${cmd}" "$@"
}
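# Illustrative calls (the URL is an example):
#
#   with_retries 5 30 curl -fsSL -O https://example.com/artifact.tar.gz
#   RETRIES=3 RETRY_INTERVAL_SEC=10 with_default_retries git fetch --all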

77
lib/utils.sh Normal file

@ -0,0 +1,77 @@
# bash
in_list() {
local s="$1" ; shift || :
while [[ "$#" -gt 0 ]] ; do
if [[ "$s" == "$1" ]] ; then
return 0
fi
shift
done
return 1
}
get_weekday() {
local date
if [[ "$#" -gt 0 ]] ; then
date="${1:0:10}"
else
date="today"
fi
date --date="$date" '+%a' | tr 'A-Z' 'a-z'
[[ ${PIPESTATUS[0]} -eq 0 ]]
}
normalize_weekdays() {
local day
for day in "$@" ; do
day="${day,,}"
case "$day" in
sun|sunday) day=sun ;;
mon|monday) day=mon ;;
tue|tuesday) day=tue ;;
wed|wednesday) day=wed ;;
thu|thursday) day=thu ;;
fri|friday) day=fri ;;
sat|saturday) day=sat ;;
*)
echo "$FUNCNAME: invalid week day \`$day'" >&2
return 1
;;
esac
echo -n "$day "
done
echo
}
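# Illustrative results:
#
#   get_weekday 2022-05-12           # => thu
#   normalize_weekdays Monday TUE    # => "mon tue "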
require_env() {
while [[ "$#" -gt 0 ]] ; do
if [[ -z "${!1}" ]] ; then
echo "${FUNCNAME[1]}: required env var \`$1' not set" >&2
exit 1
fi
shift
done
}
# Usage: starts_with STR PREFIX...
# Return true (0) if STR starts with any of PREFIX strings
starts_with() {
local str="$1" ; shift || true
while [[ "$#" -gt 0 ]] ; do
prefix="$1" ; shift || true
if [[ "${str#$prefix}" != "$str" ]] ; then
return 0
fi
done
return 1
}
check_pipe_status() {
local -a pipestatus=(${PIPESTATUS[*]})
local -i i
for ((i=0; i<${#pipestatus[*]}; ++i)) ; do
[[ "${pipestatus[$i]}" -eq 0 ]] || return 1
done
return 0
}
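# Illustrative use: succeeds only if every stage of the preceding
# pipeline did:
#
#   zcat Packages.gz | grep '^Package:' | sort >packages.txt
#   check_pipe_status || exit 1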


@ -0,0 +1,219 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
//def parseProps(text) {
// def x = {}
// for (line in text.split (/\n+/)) {
// if (line.matches (/\s*(?:#.*)?#/)) {
// continue
// }
// parts = line.split ("=", 2)
// key = parts[0]
// value = parts[1]
// x."${key}" = value
// }
// return x
//}
def loadEnv() {
def data = [:]
ws(params.BUILD_HOME) {
if (fileExists ("NEED_BUILD")) {
data.NEED_BUILD = true
}
}
return data
}
def PROPS = null
def partJobName (name) {
final String folder = env.JOB_NAME.replaceAll (/(.*\/).+$/, '$1');
if (folder == env.JOB_NAME) {
error "This job must be in a Jenkins folder!"
}
return "/" + folder + "parts/" + name
}
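// Illustrative: with JOB_NAME "StarlingX/master/build", partJobName
// ("build-iso") resolves to "/StarlingX/master/parts/build-iso"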
def runPart (name, params = []) {
build job: partJobName (name), parameters: copyCurrentParams() + params
}
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP',
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'REBUILD_BUILDER_IMAGES'
)
booleanParam (
name: 'REFRESH_SOURCE'
)
booleanParam (
name: 'BUILD_PACKAGES'
)
string (
name: 'BUILD_PACKAGES_LIST'
)
booleanParam (
name: 'BUILD_ISO'
)
booleanParam (
name: 'BUILD_RT'
)
booleanParam (
name: 'DRY_RUN'
)
booleanParam (
name: 'CLEAN_PACKAGES'
)
booleanParam (
name: 'CLEAN_ISO'
)
booleanParam (
name: 'CLEAN_REPOMGR'
)
booleanParam (
name: 'CLEAN_DOWNLOADS'
)
booleanParam (
name: 'CLEAN_DOCKER'
)
booleanParam (
name: 'FORCE_BUILD'
)
booleanParam (
name: 'FORCE_BUILD_WHEELS'
)
string (
name: 'DOCKER_IMAGE_LIST'
)
booleanParam (
name: 'BUILD_DOCKER_IMAGES'
)
booleanParam (
name: 'PUSH_DOCKER_IMAGES'
)
booleanParam (
name: 'IMPORT_BUILD'
)
string (
name: 'IMPORT_BUILD_DIR'
)
booleanParam (
name: 'USE_DOCKER_CACHE',
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
}
stages {
stage('INIT') {
steps {
script {
runPart ("init-env")
runPart ("stop-containers")
runPart ("clone-source")
runPart ("create-changelog")
PROPS = loadEnv()
if (!PROPS.NEED_BUILD) {
println "*** NO CHANGES, BUILD NOT REQUIRED ***"
}
}
}
}
stage('X0') {
when { expression { PROPS.NEED_BUILD } }
stages {
stage('PREPARE') {
steps {
runPart ("clean-build")
runPart ("configure-build")
runPart ("start-containers")
runPart ("docker-login")
}
}
stage('DOWNLOAD') {
steps {
runPart ("download-prerequisites")
}
}
stage('PACKAGES') {
when { expression { params.BUILD_PACKAGES } }
steps {
runPart ("build-packages")
runPart ("publish-packages")
}
}
stage('X1') { parallel {
stage('ISO') {
when { expression { params.BUILD_ISO } }
steps {
runPart ("build-iso")
runPart ("publish-iso")
}
} // stage('ISO')
stage('IMAGES') {
when { expression { params.BUILD_DOCKER_IMAGES } }
steps { script {
imageParams = [ string (name: 'BUILD_STREAM', value: 'stable') ]
runPart ("build-wheels", imageParams)
runPart ("publish-wheels", imageParams)
runPart ("build-docker-base", imageParams)
runPart ("build-docker-images", imageParams)
runPart ("publish-docker-images", imageParams)
runPart ("build-helm-charts", imageParams)
runPart ("publish-helm-charts", imageParams)
} }
} // stage('IMAGES')
} }// stage('X1')
} // stages
post {
always {
runPart ("stop-containers")
notAborted {
runPart ("archive-misc")
}
}
success {
sh ("BUILD_STATUS=success ${Constants.SCRIPTS_DIR}/record-build-status.sh")
}
unsuccessful {
sh ("BUILD_STATUS=fail ${Constants.SCRIPTS_DIR}/record-build-status.sh")
}
}
} // stage X0
} // stages
post {
cleanup {
saveCurrentJenkinsBuildInfo()
notAborted {
runPart ("publish-logs")
}
}
}
}


@ -0,0 +1,48 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
}
stages {
stage ("archive-misc") {
steps {
sh ("${Constants.SCRIPTS_DIR}/archive-misc.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,50 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
booleanParam (
name: 'DRY_RUN'
)
booleanParam (
name: 'PUSH_DOCKER_IMAGES',
)
booleanParam (
name: 'USE_DOCKER_CACHE',
)
string (
name: 'BUILD_STREAM'
)
}
stages {
stage ("build-docker-base") {
steps {
sh ("${Constants.SCRIPTS_DIR}/build-docker-base.sh")
}
}
}
post {
cleanup {
cleanupPartJob (logLabel: params.BUILD_STREAM)
}
}
}


@ -0,0 +1,58 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
booleanParam (
name: 'DRY_RUN'
)
booleanParam (
name: 'PUSH_DOCKER_IMAGES',
)
booleanParam (
name: 'USE_DOCKER_CACHE',
)
string (
name: 'BUILD_STREAM'
)
string (
name: 'DOCKER_IMAGE_LIST'
)
}
stages {
stage ("build-docker-images") {
steps {
sh ("${Constants.SCRIPTS_DIR}/build-docker-images.sh")
}
}
}
post {
always {
notAborted {
sh ("${Constants.SCRIPTS_DIR}/archive-docker-images.sh")
}
}
cleanup {
cleanupPartJob (logLabel: params.BUILD_STREAM)
}
}
}


@ -0,0 +1,46 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
booleanParam (
name: 'DRY_RUN'
)
}
stages {
stage ("build-helm-charts") {
steps {
sh ("${Constants.SCRIPTS_DIR}/build-helm-charts.sh")
}
}
}
post {
always {
notAborted {
sh ("${Constants.SCRIPTS_DIR}/archive-helm-charts.sh")
}
}
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,60 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
booleanParam (
name: 'BUILD_ISO'
)
}
stages {
stage ("build-iso") {
steps {
sh ("${Constants.SCRIPTS_DIR}/build-iso.sh")
}
}
stage ("sign-iso") {
steps {
sh ("${Constants.SCRIPTS_DIR}/sign-iso.sh")
}
}
}
post {
always {
notAborted {
sh ("${Constants.SCRIPTS_DIR}/archive-iso.sh")
}
}
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,65 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
booleanParam (
name: 'BUILD_PACKAGES'
)
string (
name: 'BUILD_PACKAGES_LIST'
)
booleanParam (
name: 'BUILD_RT'
)
booleanParam (
name: 'CLEAN_PACKAGES'
)
}
stages {
stage ("build-packages") {
steps {
sh ("${Constants.SCRIPTS_DIR}/build-packages.sh")
}
}
}
post {
always {
notAborted {
sh ("${Constants.SCRIPTS_DIR}/archive-packages.sh")
}
}
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,64 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
booleanParam (
name: 'USE_DOCKER_CACHE',
)
string (
name: 'BUILD_STREAM'
)
string (
name: 'DOCKER_IMAGE_LIST'
)
booleanParam (
name: 'FORCE_BUILD_WHEELS'
)
}
stages {
stage ("build-wheels") {
steps {
sh ("${Constants.SCRIPTS_DIR}/build-wheels.sh")
}
}
}
post {
always {
notAborted {
sh ("${Constants.SCRIPTS_DIR}/archive-wheels.sh")
}
}
cleanup {
cleanupPartJob (logLabel: params.BUILD_STREAM)
}
}
}


@ -0,0 +1,71 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME',
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
string (
name: 'BUILD_PACKAGES_LIST'
)
booleanParam (
name: 'CLEAN_PACKAGES'
)
booleanParam (
name: 'CLEAN_ISO'
)
booleanParam (
name: 'CLEAN_REPOMGR'
)
booleanParam (
name: 'CLEAN_DOWNLOADS'
)
booleanParam (
name: 'CLEAN_DOCKER'
)
booleanParam (
name: 'IMPORT_BUILD'
)
string (
name: 'IMPORT_BUILD_DIR'
)
}
stages {
stage ("clean-build") {
steps {
sh ("${Constants.SCRIPTS_DIR}/clean-build.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,47 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME',
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'REFRESH_SOURCE'
)
}
stages {
stage ("clone-source") {
steps {
sh ("${Constants.SCRIPTS_DIR}/clone-source.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,44 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME',
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
}
stages {
stage ("configure-build") {
steps {
sh ("bash ${Constants.SCRIPTS_DIR}/configure-build.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,62 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'FORCE_BUILD'
)
booleanParam (
name: 'BUILD_DOCKER_IMAGES_DEV'
)
booleanParam (
name: 'BUILD_DOCKER_IMAGES_STABLE'
)
}
environment {
PATH = "/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin"
SCRIPTS_DIR = "${WORKSPACE}/v3/scripts"
BUILD_HOME = "${BUILD_HOME}"
TIMESTAMP = "${TIMESTAMP}"
FORCE_BUILD = "${FORCE_BUILD}"
BUILD_DOCKER_IMAGES_DEV = "${BUILD_DOCKER_IMAGES_DEV}"
BUILD_DOCKER_IMAGES_STABLE = "${BUILD_DOCKER_IMAGES_STABLE}"
}
stages {
stage ("create-changelog") {
steps {
sh ("${SCRIPTS_DIR}/create-changelog.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,47 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'PUSH_DOCKER_IMAGES'
)
}
stages {
stage ("download-prerequisites") {
steps {
sh ("bash ${Constants.SCRIPTS_DIR}/docker-login.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,55 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
booleanParam (
name: 'BUILD_RT'
)
}
stages {
stage ("download-prerequisites") {
steps {
sh ("${Constants.SCRIPTS_DIR}/download-prerequisites.sh")
}
}
}
post {
always {
notAborted {
sh ("${Constants.SCRIPTS_DIR}/archive-prerequisites.sh")
}
}
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,46 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME',
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
}
stages {
stage ("init-env") {
steps {
sh ("${Constants.SCRIPTS_DIR}/init-env.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,50 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
string (
name: 'BUILD_STREAM'
)
}
stages {
stage ("publish-docker-images") {
steps {
sh ("${Constants.SCRIPTS_DIR}/publish-docker-images.sh")
}
}
}
post {
cleanup {
cleanupPartJob (logLabel: params.BUILD_STREAM)
}
}
}


@ -0,0 +1,47 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
}
stages {
stage ("publish-helm-charts") {
steps {
sh ("${Constants.SCRIPTS_DIR}/publish-helm-charts.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,51 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
booleanParam (
name: 'BUILD_RT'
)
}
stages {
stage ("publish-iso") {
steps {
sh ("bash ${Constants.SCRIPTS_DIR}/publish-iso.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,60 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
}
stages {
stage ("archive-jenkins-logs") {
steps { script {
if (params.BUILD_HOME) {
final String build_conf = "${params.BUILD_HOME}/build.conf"
final String jenkins_api_credentials_id = sh (returnStdout: true,
script: """#!/bin/bash
set -e
if [[ -f "${build_conf}" ]] ; then
source "${build_conf}"
echo -n "\${JENKINS_API_CREDENTIALS_ID}"
fi
"""
);
withEnv (["BUILD_HOME=${params.BUILD_HOME}"]) {
withCredentials ([usernameColonPassword (
credentialsId: jenkins_api_credentials_id,
variable: 'JENKINS_API_USERPASS')]) {
sh "v3/scripts/publish-logs.sh"
}
}
}
} }
}
}
}


@ -0,0 +1,51 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
booleanParam (
name: 'BUILD_RT'
)
}
stages {
stage ("publish-packages") {
steps {
sh ("bash ${Constants.SCRIPTS_DIR}/publish-packages.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,50 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'DRY_RUN'
)
string (
name: 'BUILD_STREAM'
)
}
stages {
stage ("publish-wheels") {
steps {
sh ("${Constants.SCRIPTS_DIR}/publish-wheels.sh")
}
}
}
post {
cleanup {
cleanupPartJob (logLabel: params.BUILD_STREAM)
}
}
}


@ -0,0 +1,50 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
booleanParam (
name: 'REBUILD_BUILDER_IMAGES'
)
booleanParam (
name: 'USE_DOCKER_CACHE'
)
}
stages {
stage ("start-containers") {
steps {
sh ("${Constants.SCRIPTS_DIR}/start-containers.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,50 @@
// vim: syn=groovy
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
parameters {
string (
name: 'MASTER_JOB_NAME'
)
string (
name: 'MASTER_BUILD_NUMBER'
)
string (
name: 'JENKINS_SCRIPTS_BRANCH'
)
string (
name: 'BUILD_HOME'
)
string (
name: 'TIMESTAMP'
)
string (
name: 'PUBLISH_TIMESTAMP'
)
}
environment {
PATH = "/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin"
SCRIPTS_DIR = "${WORKSPACE}/v3/scripts"
BUILD_HOME = "${BUILD_HOME}"
TIMESTAMP = "${TIMESTAMP}"
}
stages {
stage ("stop-containers") {
steps {
sh ("bash ${SCRIPTS_DIR}/stop-containers.sh")
}
}
}
post {
cleanup {
cleanupPartJob()
}
}
}


@ -0,0 +1,2 @@
@groovy.transform.Field
def SCRIPTS_DIR = "${WORKSPACE}/v3/scripts"
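// Referenced from the part-job pipelines as, for example:
//   sh ("${Constants.SCRIPTS_DIR}/init-env.sh")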


@ -0,0 +1,3 @@
def call(final args = [:]) {
saveCurrentJenkinsBuildInfo (args)
}


@ -0,0 +1,14 @@
def call() {
return params.collect {
if (it.value instanceof Boolean) {
return booleanParam (name: it.key, value: it.value)
}
else if (it.value instanceof String) {
return string (name: it.key, value: it.value)
}
else {
error "unsupported parameter type: key=${it.key} type=${it.value.class}"
}
}
}
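// A hedged usage sketch (the job name is hypothetical): forward all of the
// current build's parameters to a downstream part job:
//   build (job: 'some-part-job', parameters: copyCurrentParams())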


@ -0,0 +1,5 @@
def call (final callback) {
if (currentBuild.result != 'ABORTED') {
callback()
}
}
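// Used by the part jobs above, e.g.:
//   notAborted { sh ("${Constants.SCRIPTS_DIR}/archive-prerequisites.sh") }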


@ -0,0 +1,14 @@
def call(final args = [:]) {
if (params.BUILD_HOME) {
final String logLabel = args.logLabel ?: ''
withEnv (["BUILD_HOME=${params.BUILD_HOME}",
"LOG_LABEL=${logLabel}"]) {
sh """#!/bin/bash
set -e
if [[ -d "${BUILD_HOME}/jenkins" ]] ; then
echo ${JOB_NAME},${BUILD_NUMBER},${BUILD_URL},${logLabel} >>"${BUILD_HOME}/jenkins/builds.txt"
fi
"""
}
}
}
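// Called from each part job's post/cleanup block, optionally with a label:
//   post { cleanup { cleanupPartJob (logLabel: params.BUILD_STREAM) } }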


@ -0,0 +1,6 @@
def call() {
if (params.MASTER_JOB_NAME) {
final masterBuildName = params.MASTER_JOB_NAME + ' #' + params.MASTER_BUILD_NUMBER + ' - ' + params.TIMESTAMP
currentBuild.description = masterBuildName
}
}
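// Produces a description like "STX_build_master #123 - 20220512T043000Z"
// (job name, build number and timestamp here are hypothetical).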

95
scripts/00_junk/publish-iso.sh Executable file

@ -0,0 +1,95 @@
#!/bin/bash
#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_RT
load_build_env
if $DRY_RUN ; then
bail "DRY_RUN=true not supported, bailing out"
fi
DISTRO=debian
declare -a BUILD_TYPES=("std")
if $BUILD_RT ; then
BUILD_TYPES+=("rt")
fi
make_deb_repo() {
gen_deb_repo_meta_data $DRY_RUN_ARG "$@"
}
hardlink_or_copy_file() {
local src_file="$1"
local dst_file="$2"
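# ": < file" opens the file for reading and discards the input -- a cheap
# existence/readability check that fails fast before we touch dst_file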
: <"$src_file" || exit 1
rm -f "$dst_file"
ln -n "$src_file" "$dst_file" || cp "$src_file" "$dst_file" || exit 1
}
# -----------------------
set -x
RETRIES=2
RETRY_INTERVAL_SEC=5
echo "PUBLISH: TIMESTAMP=${TIMESTAMP}"
echo "PUBLISH: BUILD_OUTPUT_HOME=${BUILD_OUTPUT_HOME}"
echo "PUBLISH: BUILD_HOME=${BUILD_HOME}"
echo "PUBLISH: DISTRO=${DISTRO}"
echo "PUBLISH: MANIFEST_BRANCH=${MANIFEST_BRANCH}"
export
source ${LIB_DIR}/retries.sh
source ${LIB_DIR}/file_utils.sh
function with_default_retries {
local cmd=$1
shift 1
with_retries ${RETRIES:-1} ${RETRY_INTERVAL_SEC:-1} "${cmd}" "$@"
}
PUBLISH_OUTPUTS_DIR="${PUBLISH_DIR}/outputs"
for BT in "${BUILD_TYPES[@]}" ; do
ISO_OUTPUT="${BUILD_OUTPUT_HOME}/localdisk/lat/${BT}/deploy"
if [ -d "${ISO_OUTPUT}" ]; then
PUBLISH_ISO_DIR="${PUBLISH_OUTPUTS_DIR}/${BT}/iso"
with_default_retries mkdir -p ${PUBLISH_ISO_DIR}
for ISO in $(find ${ISO_OUTPUT} -name 'starlingx*.iso'); do
if [ "${BT}" == "std" ]; then
B_NAME=$(basename "${ISO}")
else
B_NAME=$(basename "${ISO}" | sed "s/starlingx-/starlingx-${BT}-/")
fi
if [ -L "${ISO}" ] ; then
src_iso="$(readlink -f "${ISO}")" || exit 1
else
src_iso="${ISO}"
fi
src_sig="${src_iso%.iso}.sig"
cp_or_link "${src_iso}" "${PUBLISH_ISO_DIR}"
if [[ -f "$src_sig" ]] ; then
cp -f "${src_sig}" "${PUBLISH_ISO_DIR}"
fi
link_target="$(basename "${src_iso}")"
if [ "${link_target}" != "${B_NAME}" ] ; then
ln -s -f -n "${link_target}" "${PUBLISH_ISO_DIR}/${B_NAME}" || exit 1
sig_link_target="${link_target%.iso}.sig"
sig_link="${PUBLISH_ISO_DIR}/${B_NAME%.iso}.sig"
ln -s -f -n "${sig_link_target}" "${sig_link}"
fi
done
fi
done


@ -0,0 +1,146 @@
#!/bin/bash
#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_RT
declare_env DRY_RUN
load_build_env
BUILD_OUTPUT="$BUILD_OUTPUT_HOME"
PUBLISH_BRANCH_ROOT="$BUILD_OUTPUT_HOME/export"
declare -a BUILD_TYPES=("std")
if $BUILD_RT ; then
BUILD_TYPES+=("rt")
fi
if $DRY_RUN ; then
echo "DRY_RUN=true not supported, bailing out"
exit 0
fi
make_deb_repo() {
gen_deb_repo_meta_data $DRY_RUN_ARG "$@"
}
# -----------------------
#set -x
RETRIES=2
RETRY_INTERVAL_SEC=5
CHECKSUM_FN=stx-checksums
source ${LIB_DIR}/retries.sh
source ${LIB_DIR}/file_utils.sh
function with_default_retries {
local cmd=$1
shift 1
with_retries ${RETRIES:-1} ${RETRY_INTERVAL_SEC:-1} "${cmd}" "$@"
}
PUBLISH_INPUTS_DIR="${PUBLISH_DIR}/inputs"
PUBLISH_OUTPUTS_DIR="${PUBLISH_DIR}/outputs"
echo "PUBLISH: PUBLISH_ROOT=${PUBLISH_ROOT}"
echo "PUBLISH: PUBLISH_INPUTS_DIR=${PUBLISH_INPUTS_DIR}"
echo "PUBLISH: PUBLISH_OUTPUTS_DIR=${PUBLISH_OUTPUTS_DIR}"
# Search for checksum files
# $PUBLISH_ROOT/<any layers>/<any timestamps>/$PUBLISH_SUBDIR
CHECKSUM_FILES=$(
if [[ -d "${PUBLISH_ROOT}" ]] ; then
{ # timestamp dirs
find "$PUBLISH_ROOT" -regextype posix-extended -mindepth 1 -maxdepth 1 -type d -regex '.*/[0-9]{4}.*$'
} | { # publish subdir
while read dir ; do
if [[ -n "$PUBLISH_SUBDIR" && -d "$dir/$PUBLISH_SUBDIR" ]] ; then
echo "$dir/$PUBLISH_SUBDIR"
fi
done
} | { # checksums
xargs -r -i find '{}' -type f -name "${CHECKSUM_FN}"
}
fi
)
PKGS_INPUT="${BUILD_OUTPUT}/mirrors/starlingx/binaries"
if [ -d "${PKGS_INPUT}" ]; then
PUBLISH_INPUTS_PKG_DIR="${PUBLISH_INPUTS_DIR}/packages"
with_default_retries mkdir -p ${PUBLISH_INPUTS_PKG_DIR}
for PKG in $(find ${PKGS_INPUT} -name '*.deb'); do
with_default_retries cp_or_link "${PKG}" "${PUBLISH_INPUTS_PKG_DIR}" $CHECKSUM_FILES
done
get_file_data_from_dir "${PUBLISH_INPUTS_PKG_DIR}" "${PUBLISH_INPUTS_PKG_DIR}/${CHECKSUM_FN}"
CHECKSUM_FILES+=" ${PUBLISH_INPUTS_PKG_DIR}/${CHECKSUM_FN}"
make_deb_repo "${PUBLISH_INPUTS_PKG_DIR}"
fi
SRCS_INPUT="${BUILD_OUTPUT}/mirrors/starlingx/sources"
echo "SRCS_INPUT=$SRCS_INPUT"
if [ -d "${SRCS_INPUT}" ]; then
PUBLISH_INPUTS_SRC_DIR="${PUBLISH_INPUTS_DIR}/sources"
echo "PUBLISH_INPUTS_SRC_DIR=$PUBLISH_INPUTS_SRC_DIR"
for PKG_SRC_INPUT in $(find "${SRCS_INPUT}" -maxdepth 1 -type d) ; do
PUBLISH_INPUT_SRC_PKG_DIR="${PUBLISH_INPUTS_SRC_DIR}/$(basename "${PKG_SRC_INPUT}")"
for f in $(find ${PKG_SRC_INPUT} -maxdepth 1 -type f ); do
with_default_retries mkdir -p ${PUBLISH_INPUT_SRC_PKG_DIR}
with_default_retries cp_or_link "${f}" "${PUBLISH_INPUT_SRC_PKG_DIR}" $CHECKSUM_FILES
done
done
if [ -d "${PUBLISH_INPUTS_SRC_DIR}" ]; then
get_file_data_from_dir "${PUBLISH_INPUTS_SRC_DIR}" "${PUBLISH_INPUTS_SRC_DIR}/${CHECKSUM_FN}"
CHECKSUM_FILES+=" ${PUBLISH_INPUTS_SRC_DIR}/${CHECKSUM_FN}"
fi
fi
for BT in "${BUILD_TYPES[@]}" ; do
BT_OUTPUT="${BUILD_OUTPUT}/localdisk/loadbuild/jenkins/${PROJECT}/${BT}"
if [ -d "${BT_OUTPUT}" ]; then
PUBLISH_OUTPUTS_SRC_DIR="${PUBLISH_OUTPUTS_DIR}/${BT}/sources"
PUBLISH_OUTPUTS_PKG_DIR="${PUBLISH_OUTPUTS_DIR}/${BT}/packages"
for PKG_OUTPUT in $(find "${BT_OUTPUT}" -maxdepth 1 -type d) ; do
echo "PKG_OUTPUT=${PKG_OUTPUT}"
if [ $(find "${PKG_OUTPUT}" -maxdepth 1 -type f -name '*.dsc' | wc -l) -ne 0 ]; then
PUBLISH_OUTPUTS_SRC_PKG_DIR="${PUBLISH_OUTPUTS_SRC_DIR}/$(basename "${PKG_OUTPUT}")"
with_default_retries mkdir -p "${PUBLISH_OUTPUTS_SRC_PKG_DIR}"
for f in $(find ${PKG_OUTPUT} -maxdepth 1 -type f -not -name '*deb' \
-and -not -name '*buildinfo' \
-and -not -name '*changes' \
-and -not -name '*build' \
-and -not -name '*log' ); do
with_default_retries cp_or_link "${f}" "${PUBLISH_OUTPUTS_SRC_PKG_DIR}" $CHECKSUM_FILES
done
fi
if [ $(find "${PKG_OUTPUT}" -maxdepth 1 -type f -name '*.deb' | wc -l) -ne 0 ]; then
with_default_retries mkdir -p "${PUBLISH_OUTPUTS_PKG_DIR}"
for f in $(find ${PKG_OUTPUT} -maxdepth 1 -type f -name '*deb' ); do
with_default_retries cp_or_link "${f}" "${PUBLISH_OUTPUTS_PKG_DIR}" $CHECKSUM_FILES
done
fi
done
if [ -d "${PUBLISH_OUTPUTS_SRC_DIR}" ]; then
get_file_data_from_dir "${PUBLISH_OUTPUTS_SRC_DIR}" "${PUBLISH_OUTPUTS_SRC_DIR}/${CHECKSUM_FN}"
CHECKSUM_FILES+=" ${PUBLISH_OUTPUTS_SRC_DIR}/${CHECKSUM_FN}"
fi
if [ -d "${PUBLISH_OUTPUTS_PKG_DIR}" ]; then
get_file_data_from_dir "${PUBLISH_OUTPUTS_PKGS_ROOT}" "${PUBLISH_OUTPUTS_PKG_DIR}/${CHECKSUM_FN}"
CHECKSUM_FILES+=" ${PUBLISH_OUTPUTS_PKG_DIR}/${CHECKSUM_FN}"
make_deb_repo "${PUBLISH_OUTPUTS_PKG_DIR}"
fi
fi
done


@ -0,0 +1,22 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_STREAM
load_build_env
#VERBOSE_ARG="--verbose"
mkdir -p "$BUILD_OUTPUT_HOME"
if [[ -d "$BUILD_HOME/$WORKSPACE_ROOT_SUBDIR/std/build-images" ]] ; then
mkdir -p "$BUILD_OUTPUT_HOME/$WORKSPACE_ROOT_SUBDIR/std"
ln -sfn "$WORKSPACE_ROOT_SUBDIR" "$BUILD_OUTPUT_HOME/workspace"
safe_copy_dir $DRY_RUN_ARG $VERBOSE_ARG \
"$BUILD_HOME/workspace/std/build-images" \
"$BUILD_OUTPUT_HOME/workspace/std/"
fi

17
scripts/archive-helm-charts.sh Executable file

@ -0,0 +1,17 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
load_build_env
#VERBOSE_ARG="--verbose"
if [[ -d "$BUILD_HOME/workspace/std/build-helm" ]] ; then
mkdir -p "$BUILD_OUTPUT_HOME"
my_user="$(id -u)"
my_group="$(id -g)"
safe_copy_dir $DRY_RUN_ARG $VERBOSE_ARG --chown $my_user:$my_group \
"$BUILD_HOME/workspace/std/build-helm" \
"$BUILD_OUTPUT_HOME/workspace/std/"
fi

47
scripts/archive-iso.sh Executable file

@ -0,0 +1,47 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_RT
load_build_env
LAT_SUBDIR="localdisk/lat"
#VERBOSE_ARG="--verbose"
build_types=("std")
if $BUILD_RT ; then
build_types+=("rt")
fi
$BUILD_ISO || bail "BUILD_ISO=false, bailing out"
declare -a iso_files
mkdir -p "${BUILD_OUTPUT_HOME}/localdisk"
for build_type in "${build_types[@]}" ; do
src_dir="${BUILD_HOME}/${LAT_SUBDIR}/${build_type}"
dst_dir="${BUILD_OUTPUT_HOME}/${LAT_SUBDIR}/${build_type}"
if [[ -d "${src_dir}" ]] ; then
notice "archving $src_dir"
mkdir -p "$dst_dir"
safe_copy_dir $DRY_RUN_ARG $VERBOSE_ARG "${src_dir}/" "${dst_dir}/"
if [[ -e "${dst_dir}/deploy" ]] ; then
iso_files+=($(find "${dst_dir}/deploy" -mindepth 1 -maxdepth 1 -type f))
fi
fi
done
if [[ "${#iso_files[@]}" -gt 0 ]] ; then
notice "changing file ownership to $USER"
safe_chown $DRY_RUN_ARG $VERBOSE_ARG "$USER:" "${iso_files[@]}"
fi
if ! $DRY_RUN ; then
if [[ -d "$BUILD_OUTPUT_HOME/localdisk/lat/std/deploy" ]] ; then
ln -sfn "lat/std/deploy" "${BUILD_OUTPUT_HOME}/localdisk/deploy"
else
rm -f "$BUILD_OUTPUT_HOME/localdisk/deploy"
fi
fi

33
scripts/archive-misc.sh Executable file

@ -0,0 +1,33 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
load_build_env
#VERBOSE_ARG="--verbose"
mkdir -p "$BUILD_OUTPUT_HOME"
safe_copy_dir $DRY_RUN_ARG $VERBOSE_ARG \
--exclude /aptly \
--exclude /localdisk/channel/\*\* \
--exclude /localdisk/designer \
--exclude /mirrors \
--exclude /localdisk/lat \
"$BUILD_HOME/" "$BUILD_OUTPUT_HOME/"
# localdisk/loadbuild/$USER
mkdir -p "$BUILD_OUTPUT_HOME/$(dirname "$REPO_ROOT_SUBDIR")"
# localdisk/designer/$USER/$PROJECT => $BUILD_HOME/...
ln -sfn "$BUILD_HOME/$REPO_ROOT_SUBDIR" "$BUILD_OUTPUT_HOME/$REPO_ROOT_SUBDIR"
# repo => localdisk/designer/$USER/$PROJECT
ln -sfn "$REPO_ROOT_SUBDIR" "$BUILD_OUTPUT_HOME/repo"
# workspace => localdisk/loadbuild/$USER/$PROJECT
ln -sfn "$WORKSPACE_ROOT_SUBDIR" "$BUILD_OUTPUT_HOME/workspace"
# aptly => $BUILD_HOME/...
ln -sfn "$BUILD_HOME/aptly" "$BUILD_OUTPUT_HOME/aptly"

21
scripts/archive-packages.sh Executable file

@ -0,0 +1,21 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_RT
load_build_env
#VERBOSE_ARG="--verbose"
$BUILD_PACKAGES || bail "BUILD_PACKAGES=false, skipping build"
if [[ -d "$BUILD_HOME/$WORKSPACE_ROOT_SUBDIR" ]] ; then
my_user="$(id -u)"
my_group="$(id -g)"
mkdir -p "$BUILD_OUTPUT_HOME/$WORKSPACE_ROOT_SUBDIR"
safe_copy_dir --chown "$my_user:$my_group" $DRY_RUN_ARG $VERBOSE_ARG \
"$BUILD_HOME/$WORKSPACE_ROOT_SUBDIR/" "$BUILD_OUTPUT_HOME/$WORKSPACE_ROOT_SUBDIR/"
ln -sfn "$WORKSPACE_ROOT_SUBDIR" "$BUILD_OUTPUT_HOME/workspace"
fi


@ -0,0 +1,15 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_RT
load_build_env
set -x
if [[ -d "$BUILD_HOME/mirrors" ]] ; then
mkdir -p "$BUILD_OUTPUT_HOME"
ln -sfn "$BUILD_HOME/mirrors" "$BUILD_OUTPUT_HOME/"
fi

19
scripts/archive-wheels.sh Executable file

@ -0,0 +1,19 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_STREAM
load_build_env
#VERBOSE_ARG="--verbose"
mkdir -p "$BUILD_OUTPUT_HOME"
src_dir="$BUILD_HOME/workspace/std/build-wheels-$DOCKER_BASE_OS-$BUILD_STREAM"
dst_dir="$BUILD_OUTPUT_HOME/workspace/std/"
if [[ -d "$src_dir" ]] ; then
mkdir -p "$dst_dir"
safe_copy_dir $DRY_RUN_ARG $VERBOSE_ARG "$src_dir" "$dst_dir"
fi

63
scripts/build-docker-base.sh Executable file

@ -0,0 +1,63 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env DRY_RUN
require_env USE_DOCKER_CACHE
require_env BUILD_STREAM stable
require_env PUSH_DOCKER_IMAGES
declare_env PUBLISH_ROOT_URL
declare_env DOCKER_IMAGE_BASE
load_build_env
if [[ -n "$DOCKER_IMAGE_BASE" ]] ; then
bail "DOCKER_IMAGE_BASE is set, bailing out"
fi
base_image_tag="$BUILD_BRANCH-$BUILD_STREAM-$TIMESTAMP"
base_image_latest_tag="$BUILD_BRANCH-$BUILD_STREAM-latest"
declare -a cmd=(
"./build-stx-base.sh"
"--os=$DOCKER_BASE_OS"
"--version=$base_image_tag"
"--attempts=$DOCKER_BUILD_RETRY_COUNT"
"--stream=$BUILD_STREAM"
"--registry=$DOCKER_REGISTRY"
"--user=$DOCKER_REGISTRY_ORG"
"--latest"
"--latest-tag=$base_image_latest_tag"
)
if [[ "$USE_DOCKER_CACHE" == true ]] ; then
cmd+=("--cache")
fi
if $USE_POD_URLS_IN_DOCKER_IMAGES ; then
cmd+=("--local")
else
require_env PUBLISH_ROOT_URL
cmd+=("--repo 'deb [trusted=yes check-valid-until=0] $PUBLISH_URL/inputs/packages ./'")
cmd+=("--repo 'deb [trusted=yes check-valid-until=0] $PUBLISH_URL/outputs/std/packages ./'")
fi
# build-stx-base.sh can only push to one repo. We will push to any
# additional repos manually.
if $PUSH_DOCKER_IMAGES ; then
cmd+=("--push")
fi
# build it
stx_docker_cmd $DRY_RUN_ARG "cd \$MY_REPO/build-tools/build-docker-images && ${cmd[*]}"
# retag and push it to extra registries
if $PUSH_DOCKER_IMAGES ; then
for reg in $EXTRA_REGISTRY_PREFIX_LIST ; do
stx_docker_cmd $DRY_RUN_ARG "docker tag $DOCKER_REGISTRY/$DOCKER_REGISTRY_ORG/stx-$DOCKER_BASE_OS:$base_image_tag $reg/stx-$DOCKER_BASE_OS:$base_image_tag"
stx_docker_cmd $DRY_RUN_ARG "docker push $reg/stx-$DOCKER_BASE_OS:$base_image_tag"
done
fi

100
scripts/build-docker-images.sh Executable file

@ -0,0 +1,100 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env DRY_RUN
require_env USE_DOCKER_CACHE
require_env BUILD_STREAM stable
require_env PUSH_DOCKER_IMAGES
declare_env DOCKER_IMAGE_LIST
declare_env DOCKER_IMAGE_BASE
load_build_env
DOCKER_OS_LIST="$DOCKER_BASE_OS distroless"
wheels_file="std/build-wheels-$DOCKER_BASE_OS-$BUILD_STREAM/stx-$DOCKER_BASE_OS-$BUILD_STREAM-wheels.tar"
#require_file "$HOST_WORKSPACE/$wheels_file"
if [[ -n "$DOCKER_IMAGE_BASE" ]] ; then
base_img="$DOCKER_IMAGE_BASE"
else
base_image_tag="$BUILD_BRANCH-$BUILD_STREAM-$TIMESTAMP"
base_img="$DOCKER_REGISTRY/$DOCKER_REGISTRY_ORG/stx-$DOCKER_BASE_OS:$base_image_tag"
fi
declare -a cmd=(
"./build-stx-images.sh"
"--attempts=$DOCKER_BUILD_RETRY_COUNT"
"--stream=$BUILD_STREAM"
"--base=$base_img"
"--no-pull-base"
"--version=$TIMESTAMP"
"--prefix=$BUILD_BRANCH"
"--registry=$DOCKER_REGISTRY"
"--user=$DOCKER_REGISTRY_ORG"
"--latest"
)
if [[ -f "$WORKSPACE_ROOT/$wheels_file" ]] ; then
cmd+=("--wheels=\$MY_WORKSPACE/$wheels_file")
fi
if [[ "$USE_DOCKER_CACHE" == true ]] ; then
cmd+=("--cache")
fi
# add --only if $DOCKER_IMAGE_LIST contains anything
if [[ -n "$DOCKER_IMAGE_LIST" ]] ; then
comma=
only=
for img in $(echo "$DOCKER_IMAGE_LIST" | sed 's/[,;]+/ /g') ; do
[[ -n "$img" ]] || continue
only+="${only}${comma}${img}"
comma=","
done
if [[ -n "$only" ]] ; then
cmd+=("--only=$only")
fi
fi
# build-stx-base.sh can only push to one repo. We will push to any
# additional repos manually.
if $PUSH_DOCKER_IMAGES ; then
cmd+=("--push")
fi
# Usage: retag_and_push $IMAGE_LIST_FILE
retag_and_push() {
if [[ -n "$EXTRA_REGISTRY_PREFIX_LIST" ]] ; then
local list_file="$1"
local src_img
for src_img in $(grep -E -v '^\s*(#.*)?$' $list_file) ; do
local reg_prefix base_img
base_img="${src_img#$DOCKER_REGISTRY/$DOCKER_REGISTRY_ORG}"
if [[ "$base_img" == "$src_img" ]] ; then
die "$list_file: unexpected image \"$src_img\""
fi
for reg_prefix in $EXTRA_REGISTRY_PREFIX_LIST ; do
local dst_img="$(echo "${reg_prefix}/$base_img" | sed 's!//*!/!g')"
stx_docker_cmd $DRY_RUN_ARG "docker tag $src_img $dst_img"
stx_docker_cmd $DRY_RUN_ARG "docker push $dst_img"
done
done
fi
}
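# The *.lst files consumed here contain one fully-qualified image reference
# per line, e.g. (hypothetical):
#   docker.io/starlingx/stx-keystone:master-stable-20220512T043000Z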
# build them
lists_dir="$HOST_WORKSPACE/std/build-images"
for os in $(echo $DOCKER_OS_LIST | sed 's/,/ /g') ; do
list_file="$lists_dir/images-$os-$BUILD_STREAM-versioned.lst"
notice "building $BUILD_STREAM $os images"
$DRY_RUN || rm -f "$list_file"
stx_docker_cmd $DRY_RUN_ARG "cd \$MY_REPO/build-tools/build-docker-images && ${cmd[*]} --os=$os"
if $PUSH_DOCKER_IMAGES && [[ -f "$list_file" ]] ; then
retag_and_push "$list_file"
fi
done

74
scripts/build-helm-charts.sh Executable file

@ -0,0 +1,74 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env DRY_RUN
load_build_env
BUILD_STREAMS="stable dev"
BUILD_TAGS="latest versioned"
# find image dirs relative to WORKSPACE_ROOT
declare -a image_dirs
if [[ -d "$WORKSPACE_ROOT/std/build-images" ]] ; then
image_dirs+=("std/build-images")
fi
if [[ -d "$WORKSPACE_ROOT/rt/build-images" ]] ; then
image_dirs+=("rt/build-images")
fi
# copy any extra image-*.lst files to workspace so that
# build containers can see them
if [[ -d "$EXTRA_IMAGE_RECORD_DIR" ]] ; then
if ! $DRY_RUN ; then
rm -rf --one-file-system "$WORKSPACE_ROOT/extra-image-records"/*
mkdir -p "$WORKSPACE_ROOT/extra-image-records"
find "$EXTRA_IMAGE_RECORD_DIR" \
-mindepth 1 -maxdepth 1 -name 'images-*.lst' \
-exec \cp --force --preserve=links --no-dereference -t "$WORKSPACE_ROOT/extra-image-records" '{}' '+' \
|| exit 1
image_dirs+=('extra-image-records')
fi
fi
build_helm_charts() {
local cmd="$1"
stx_docker_cmd $DRY_RUN_ARG "set -e ; cd \"\$MY_REPO/build-tools\" ; export PATH=\"\$PWD:\$PATH\" ; $cmd"
}
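# check_pipe_status comes from job_utils.sh; it is assumed to succeed only
# when every element of the previous pipeline's PIPESTATUS was zero.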
# call build-helm-charts.sh in container for each stream/tag
if [[ "${#image_dirs[@]}" -gt 0 ]] ; then
for build_stream in $BUILD_STREAMS ; do
for build_tag in $BUILD_TAGS ; do
for os in $DOCKER_BASE_OS ; do
label="${os}-${build_stream}-${build_tag}"
distroless_label="distroless-${build_stream}-${build_tag}"
# look for image list files
image_arg=$(
sep=
( cd "$WORKSPACE_ROOT" && find "${image_dirs[@]}" \
-mindepth 1 -maxdepth 1 -name "images-${label}.lst" -o -name "images-${distroless_label}.lst" ; ) \
| while read image_list_file ; do
echo -n "${sep}\$MY_WORKSPACE/${image_list_file}"
sep=","
done
check_pipe_status || exit 1
)
check_pipe_status || exit 1
if [[ -n "$image_arg" ]] ; then
cmd="build-helm-charts.sh"
cmd+=" --os ${os}"
cmd+=" --image-record ${image_arg}"
cmd+=" --label '${label}'"
cmd+=" --verbose"
cmd+=" | tee \"\$MY_WORKSPACE/helm-${label}.log\""
cmd+=" ; [[ \${PIPESTATUS[0]} -eq 0 ]]"
build_helm_charts "$cmd" || exit 1
fi
done
done
done
fi

23
scripts/build-iso.sh Executable file

@ -0,0 +1,23 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env BUILD_ISO
require_env BUILD_RT
load_build_env
$BUILD_ISO || bail "BUILD_ISO=false, bailing out"
notice "building STD ISO"
stx_docker_cmd $DRY_RUN_ARG "build-image --std"
if ! $DRY_RUN ; then
ln -sfn lat/std/deploy "$BUILD_HOME/localdisk/deploy"
fi
if $BUILD_RT ; then
notice "building RT ISO"
stx_docker_cmd $DRY_RUN_ARG "build-image --rt"
fi

57
scripts/build-packages.sh Executable file

@ -0,0 +1,57 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env BUILD_PACKAGES
declare_env BUILD_PACKAGES_LIST
require_env BUILD_RT
require_env CLEAN_PACKAGES
load_build_env
$BUILD_PACKAGES || bail "BUILD_PACKAGES=false, skipping build"
BUILD_PACKAGES_LIST=$(trim $(echo $BUILD_PACKAGES_LIST | sed 's/,/ /g'))
info "CLEAN_PACKAGES=$CLEAN_PACKAGES"
info "BUILD_PACKAGES_LIST=$BUILD_PACKAGES_LIST"
# Always build std, rt only if requested
build_types="std"
if $BUILD_RT ; then
build_types+=",rt"
fi
count=0
success=0
# Build all packages a few times
declare -a extra_args
while [[ $count -lt $BUILD_PACKAGES_ITERATIONS ]] ; do
extra_args=()
# # clean on 1st iteration only if CLEAN_BUILD was set and we are building
# # specific packages
# if [[ $count == 0 ]] && $CLEAN_PACKAGES && [[ -n $BUILD_PACKAGES_LIST ]] ; then
# extra_args+=("-c")
# fi
# Either build specific or all packages
if [[ -n $BUILD_PACKAGES_LIST ]] ; then
extra_args+=("-p" "$(echo $BUILD_PACKAGES_LIST | sed 's/ /,/g')")
else
extra_args+=("-a")
fi
# build 'em
if stx_docker_cmd $DRY_RUN_ARG $VERBOSE_ARG "build-pkgs ${extra_args[*]} -b $build_types" ; then
success=1
else
success=0
fi
let ++count
done
if [[ $success -ne 1 ]] ; then
notice "Failed to build packages after $BUILD_PACKAGES_ITERATIONS iterations"
exit 1
fi

84
scripts/build-wheels.sh Executable file

@ -0,0 +1,84 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env USE_DOCKER_CACHE
require_env DRY_RUN
require_env BUILD_STREAM stable
require_env DOCKER_IMAGE_LIST
require_env FORCE_BUILD_WHEELS
load_build_env
BUILD_STREAM=stable
DOCKER_IMAGE_LIST=$(trim $(echo $DOCKER_IMAGE_LIST | sed 's/,/ /g'))
image_requires_wheels() {
local -a parts
parts=($(source "$1" && echo "$BUILDER ${LABEL:-$PROJECT}"))
local builder=${parts[0]}
local name=${parts[1]}
if [[ "$builder" != "loci" ]] ; then
return 1
fi
if [[ -n "${DOCKER_IMAGE_LIST}" ]] && ! in_list "$name" "$DOCKER_IMAGE_LIST" ; then
return 1
fi
return 0
}
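# Each *.${BUILD_STREAM}_docker_image file is a sourceable set of variables;
# a hypothetical example of what image_requires_wheels() reads:
#   BUILDER=loci
#   PROJECT=keystone
#   LABEL=stx-keystone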
wheels_required() {
local -i wheels_images=0
local projects
projects="$(cd "$REPO_ROOT" && repo forall -c 'echo $REPO_PATH' 2>/dev/null)"
local proj
for proj in $projects ; do
local os
for os in $DOCKER_OS_LIST ; do
local inc
for inc in $(find "$REPO_ROOT/$proj" -maxdepth 2 -type f -name "${os}_${BUILD_STREAM}_docker_images.inc") ; do
local basedir
local dir
basedir="$(dirname "$inc")"
for dir in $(grep -E -v '^\s*(#.*)?$' "$inc") ; do
local img_dir="$basedir/$dir/$os"
if [[ -d "$img_dir" ]] ; then
for img_file in $(find "$img_dir" -mindepth 1 -maxdepth 1 -name "*.${BUILD_STREAM}_docker_image") ; do
if image_requires_wheels "$img_file" ; then
let ++wheels_images
echo "${img_file#$REPO_ROOT/}: requires wheels" >&2
fi
done
fi
done
done
done
done
[[ $wheels_images -gt 0 ]] && return 0 || return 1
}
if ! $FORCE_BUILD_WHEELS && ! wheels_required ; then
bail "wheels not required, bailing out"
fi
cmd=(
"./build-wheel-tarball.sh"
"--os=$DOCKER_BASE_OS"
"--stream=$BUILD_STREAM"
"--attempts=$DOCKER_BUILD_RETRY_COUNT"
)
if [[ "$USE_DOCKER_CACHE" == true ]] ; then
cmd+=("--cache")
fi
for python_arg in "" "--python2" ; do
stx_docker_cmd $DRY_RUN_ARG "cd \$MY_REPO/build-tools/build-wheels && ${cmd[*]} $python_arg"
done

94
scripts/clean-build.sh Executable file

@ -0,0 +1,94 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env CLEAN_PACKAGES
require_env CLEAN_REPOMGR
require_env CLEAN_DOWNLOADS
require_env CLEAN_DOCKER
require_env CLEAN_ISO
require_env IMPORT_BUILD
declare_env IMPORT_BUILD_DIR
load_build_env
#VERBOSE_ARG=--verbose
VERBOSE_ARG=
clean_or_import() {
local -a exclude_args
while [[ "$1" == "--exclude" ]] ; do
exclude_args+=("$1" "$2")
shift 2
done
local src_subdir="$1"
local dst_subdir="$2"
local clean_requested="$3"
local allow_merge="${4:-false}"
local src_dir="$IMPORT_BUILD_DIR/$src_subdir"
if $IMPORT_BUILD && [[ -n "$IMPORT_BUILD_DIR" ]] && [[ -d "$src_dir" ]] ; then
local real_src_dir
real_src_dir="$(readlink -f "$src_dir")"
local delete_arg
if ! $allow_merge ; then
delete_arg="--delete"
fi
notice "importing $src_subdir from $IMPORT_BUILD_DIR"
dst_dir="$BUILD_HOME/$dst_subdir"
mkdir -p "$dst_dir"
safe_copy_dir $DRY_RUN_ARG $VERBOSE_ARG $delete_arg "${exclude_args[@]}" \
"$real_src_dir/" "$dst_dir/"
return
fi
if $clean_requested ; then
notice "removing $dst_subdir"
safe_rm $DRY_RUN_ARG $VERBOSE_ARG "$BUILD_HOME/$dst_subdir"/*
fi
}
if [[ -d "$BUILD_HOME/localdisk/loadbuild" ]] ; then
# If user has changed, there may be subdirectories remaining under
# the inner localdisk/loadbuild named after a different user. Delete them.
declare -a rm_dirs
readarray -t rm_dirs < <(
find "$BUILD_HOME/localdisk/loadbuild" -mindepth 1 -maxdepth 1 \
-type d \! -name "$USER"
)
# If project name has changed, there may be subdirectories named after
# the old project name(s), delete them too.
if [[ -d "$BUILD_HOME/localdisk/loadbuild/$USER" ]] ; then
readarray -O "${#rm_dirs[@]}" -t rm_dirs < <(
find "$BUILD_HOME/localdisk/loadbuild/$USER" -mindepth 1 -maxdepth 1 \
-type d \! -name "$PROJECT"
)
fi
if [[ "${#rm_dirs[@]}" -gt 0 ]] ; then
safe_rm $DRY_RUN_ARG $VERBOSE_ARG "${rm_dirs[@]}"
fi
fi
clean_or_import --exclude /meta-lat --exclude /tmp --exclude /sign \
"workspace" "$WORKSPACE_ROOT_SUBDIR" $CLEAN_PACKAGES
clean_or_import "mirrors" "mirrors" $CLEAN_DOWNLOADS true
clean_or_import "aptly" "aptly" $CLEAN_REPOMGR
clean_or_import "docker" "docker" $CLEAN_DOCKER
clean_or_import "docker" "lat" $CLEAN_ISO
# these files can't be imported, always delete them
notice "removing misc files"
safe_rm $DRY_RUN_ARG $VERBOSE_ARG \
"$BUILD_HOME"/localdisk/*.log \
"$BUILD_HOME"/localdisk/channel \
"$BUILD_HOME"/localdisk/deploy \
"$BUILD_HOME"/localdisk/pkgbuilder \
"$BUILD_HOME"/localdisk/workdir \
"$BUILD_HOME"/localdisk/sub_workdir \
"$BUILD_HOME"/localdisk/tmp \
"$BUILD_HOME"/lat \
\
"$BUILD_OUTPUT_HOME"/{SUCCESS,FAILURE,NEED_BUILD,NO_BUILD_REQUIRED,LAST_COMMITS*,CHANGES}

67
scripts/clone-source.sh Executable file

@ -0,0 +1,67 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/../lib/retries.sh
require_env BUILD_HOME
require_env DRY_RUN
require_env REFRESH_SOURCE
load_build_env
RETRIES=3
RETRY_INTERVAL_SEC=15
notice "initializing source repo" \
"BUILD_HOME=$BUILD_HOME"
mkdir -p "$BUILD_HOME"
mkdir -p "$BUILD_HOME/$REPO_ROOT_SUBDIR"
mkdir -p "$BUILD_HOME/$WORKSPACE_ROOT_SUBDIR"
ln -sfn "$REPO_ROOT_SUBDIR" "$REPO_ROOT"
ln -sfn "$WORKSPACE_ROOT_SUBDIR" "$WORKSPACE_ROOT"
shell() {
if [[ "$1" == "--dry-run" ]] ; then
echo ">>> (dry) running:" >&2
echo "$2" >&2
return
fi
echo ">>> running" >&2
echo "$1" >&2
( eval "$1" ; )
}
# clone sources
cd "$REPO_ROOT"
if [[ -f ".repo-init-done" ]] && ! $REFRESH_SOURCE ; then
notice "repo already initialized, exiting"
exit 0
fi
if $DRY_RUN && [[ -f ".repo-init-done" ]] ; then
dry_run_arg="--dry-run"
else
dry_run_arg=
fi
# We can't dry run, since we need the sources
dry_run_arg=
shell $dry_run_arg "repo init -u \"$MANIFEST_URL\" -b \"$MANIFEST_BRANCH\" -m \"$MANIFEST\""
for d in $(repo forall -c 'echo $REPO_PATH' 2>/dev/null) ; do
[[ -d "$d" ]] || continue
shell $dry_run_arg "
set -e ;
cd \"$d\"
git rebase --abort >/dev/null 2>&1 || :
git am --abort >/dev/null 2>&1 || :
git clean -d -f
git checkout .
"
done
with_default_retries shell $dry_run_arg "repo sync --force-sync --force-remove-dirty -j4"
# prevent "stx build prepare" from doing another "repo sync"
shell $dry_run_arg "touch .repo-init-done"

77
scripts/configure-build.sh Executable file

@ -0,0 +1,77 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
set -x
load_build_env
######################################################
# stx.conf
######################################################
rm -f stx.conf
unset DEBIAN_DISTRIBUTION DEBIAN_SNAPSHOT DEBIAN_SECURITY_SNAPSHOT
source ./import-stx
stx config --add builder.myuname "$USER"
stx config --add builder.uid "$USER_ID"
# Embedded in ~/localrc of the build container
stx config --add project.gituser "$USER_NAME"
stx config --add project.gitemail $USER_EMAIL
# This will be included in the name of your build container and the basename for $MY_REPO_ROOT_DIR
stx config --add project.name "$PROJECT"
stx config --add project.proxy false
# debian distro & urls
if [[ -n "$DEBIAN_SNAPSHOT_BASE" ]] ; then
stx config --add project.debian_snapshot_base "$DEBIAN_SNAPSHOT_BASE"
fi
if [[ -n "$DEBIAN_SECURITY_SNAPSHOT_BASE" ]] ; then
stx config --add project.debian_security_snapshot_base "$DEBIAN_SECURITY_SNAPSHOT_BASE"
fi
notice "$PWD/stx.conf"
cat stx.conf
######################################################
# BUILD file
######################################################
build_info_file="$WORKSPACE_ROOT/BUILD"
release_info_file="${REPO_ROOT}/${RELEASE_INFO_FILE}"
if [[ -n "$SW_VERSION" ]] ; then
sw_version="$SW_VERSION"
elif [[ -n "$release_info_file" ]] ; then
sw_version=$(grep "PLATFORM_RELEASE=" "$release_info_file" | cut -d = -f 2 | tr -d '"')
[[ -n "$sw_version" ]] || die "unable to determine SW_VERSION"
else
die "unable to determine SW_VERSION"
fi
cat >"$build_info_file" <<_END
###
### Wind River Cloud Platform
### Release $sw_version
###
### Wind River Systems, Inc.
###
SW_VERSION="$sw_version"
BUILD_TARGET="Host Installer"
BUILD_TYPE="Formal"
BUILD_ID="$TIMESTAMP"
SRC_BUILD_ID="$BUILD_NUMBER"
JOB="$JOB_NAME"
BUILD_BY="$USER"
BUILD_NUMBER="$BUILD_NUMBER"
BUILD_HOST="$HOSTNAME"
BUILD_DATE="$(date '+%F %T %z')"
_END
notice "$build_info_file"
cat "$build_info_file"

24
scripts/create-changelog.sh Executable file

@ -0,0 +1,24 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/../lib/changelog_utils.sh
require_env BUILD_HOME
require_env FORCE_BUILD
require_env BUILD_DOCKER_IMAGES_DEV
require_env BUILD_DOCKER_IMAGES_STABLE
load_build_env
rm -f "$BUILD_HOME"/{CHANGELOG*,LAST_COMMITS,NEED_BUILD,NO_BUILD_REQUIRED}
(
MY_WORKSPACE="$BUILD_HOME"
MY_REPO_ROOT_DIR="$BUILD_HOME/$REPO_ROOT_SUBDIR"
set +x
if need_build ; then
create_standard_changelogs
fi
)

92
scripts/docker-login.sh Executable file

@ -0,0 +1,92 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env PUSH_DOCKER_IMAGES
declare_env DOCKER_CONFIG_FILE
load_build_env
HOST_WORKSPACE="$BUILD_HOME/workspace"
# login not required
$PUSH_DOCKER_IMAGES || bail "PUSH_DOCKER_IMAGES=$PUSH_DOCKER_IMAGES, docker login not required"
# find registries that require a login
declare dummy
declare login_spec login_reg
declare -a login_repos
declare -A login_repos_hash
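# parse_docker_registry (from job_utils.sh) is assumed to print the registry
# host[:port] followed by the rest of the spec, whitespace-separated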
# for each registry that requires a login
for login_spec in $DOCKER_REGISTRY_PUSH_LOGIN_LIST ; do
read login_reg dummy <<<$(parse_docker_registry "$login_spec")
# check if we intend to push to it
declare spec reg
for spec in $DOCKER_REGISTRY $DOCKER_EXTRA_REGISTRY_PREFIX_LIST ; do
read reg dummy <<<$(parse_docker_registry "$spec")
if [[ "$reg" == "$login_reg" && -z "${login_repos_hash[$reg]}" ]] ; then
login_repos_hash["$reg"]=1
login_repos+=("$reg")
fi
done
done
unset dummy login_spec login_reg spec reg
unset login_repos_hash
[[ "${#login_repos[@]}" -gt 0 ]] || bail "no push registries requiring authentication defined, docker login not required"
#
# Merge usernames & passwords from $DOCKER_CONFIG_FILE into
# $HOME/.docker/config.json inside the builder pod
#
# {
# "auths": {
# "repo.org:port": {
# "auth": "..." # base64-encoded USERNAME:PASSWORD
# },
# ...
# }
# }
#
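# A minimal $DOCKER_CONFIG_FILE can be generated like this (registry and
# credentials are hypothetical):
#   printf '{"auths":{"some.host.org:1234":{"auth":"%s"}}}' \
#       "$(echo -n 'USER:PASSWORD' | base64 -w0)" >docker-config.json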
if [[ -z "$DOCKER_CONFIG_FILE" ]] ; then
DOCKER_CONFIG_FILE=~/.docker/config.json
elif [[ ! $DOCKER_CONFIG_FILE =~ ^/ ]] ; then
DOCKER_CONFIG_FILE="$BUILD_HOME/$DOCKER_CONFIG_FILE"
fi
require_file "$DOCKER_CONFIG_FILE"
notice "updating \$HOME/.docker/config.json in builder pod"
mkdir -p "$HOST_WORKSPACE/tmp"
old_docker_config_file="$HOST_WORKSPACE/tmp/docker.config.json.old"
new_docker_config_file="$HOST_WORKSPACE/tmp/docker.config.json.new"
rm -f "$old_docker_config_file"
# download the existing config file from the pod
QUIET=true stx_docker_cmd "[[ -f \$HOME/.docker/config.json ]] && 'cp' \$HOME/.docker/config.json \$MY_WORKSPACE/tmp/docker.config.json.old || true"
# merge the "auths" from DOCKER_CONFIG_FILE into it
$PYTHON3 -c '
import sys, json
ref_auths = json.load (open (sys.argv[1])).get ("auths", {})
try:
config = json.load (open (sys.argv[2]))
config.setdefault ("auths", {}).update (ref_auths)
except FileNotFoundError:
config = {
"auths": ref_auths
}
json.dump (config, open (sys.argv[3], "w"), indent = "\t")
' "$DOCKER_CONFIG_FILE" "$old_docker_config_file" "$new_docker_config_file"
# upload it back to the pod
if [[ ! -f "$old_docker_config_file" ]] || ! diff -q -u "$old_docker_config_file" "$new_docker_config_file" ; then
QUIET=true stx_docker_cmd "mkdir -p \$HOME/.docker && 'cp' \$MY_WORKSPACE/tmp/docker.config.json.new \$HOME/.docker/config.json"
fi
rm -f $old_docker_config_file $new_docker_config_file
notice "logging in to remote repos"
for reg in "${login_repos[@]}" ; do
docker_login "$reg"
done


@ -0,0 +1,15 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_RT
load_build_env
build_types=std
if $BUILD_RT ; then
build_types+=",rt"
fi
stx_docker_cmd $DRY_RUN_ARG "\$MY_REPO/build-tools/stx/downloader -b -s -B $build_types"


@ -0,0 +1,13 @@
#!/bin/bash
set -ex
: ${__REALLY_RUN_ME:?}
apt-get update
apt-get install apt-utils -y
chown "$1" .
groupadd --gid "$2" --non-unique "group-$2"
useradd --gid "$2" --uid "$1" --non-unique --no-create-home "user-$1"
/sbin/runuser "user-$1" -- ./create-deb-meta.sh


@ -0,0 +1,17 @@
#!/bin/bash
set -ex
gen() {
apt-ftparchive \
-o APT::FTPArchive::AlwaysStat=1 \
-o CacheDir=cache \
-o Packages::Extensions='[ .deb, .udeb ]' \
"$@"
}
gen packages . >Packages
gzip -9 -c Packages >Packages.gz
gen release . >Release
sed -r -i 's#^(Date:\s*).*#\1'"$NOW"'#' Release
rm -f Packages

44
scripts/init-env.sh Executable file

@ -0,0 +1,44 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
if [[ -d "$BUILD_HOME" ]] ; then
info "creating $BUILD_HOME"
mkdir -p "$BUILD_HOME"
fi
if [[ ! -f "$BUILD_HOME/build.conf" ]] ; then
info "$BUILD_HOME/build.conf: file not found"
info "creating $BUILD_HOME/build.conf.example"
cp "$TOP_SCRIPTS_DIR/templates/build.conf.example.in" "$BUILD_HOME/build.conf.example"
info "Please use the example file as the starting point"
exit 1
fi
load_build_config
set -x
for dir in "$BUILD_OUTPUT_ROOT" ; do
if [[ ! -d "$dir" ]] ; then
info "creating $dir"
mkdir -p "$dir"
fi
done
# Install source_me.sh to $BUILD_HOME
info "creating $BUILD_HOME/source_me.sh"
cp "$TOP_SCRIPTS_DIR/templates/source_me.sh.in" "$BUILD_HOME/source_me.sh"
# Delete old jenkins job list
if [[ -d "$BUILD_HOME/jenkins" ]] ; then
rm -f "$BUILD_HOME/jenkins/builds.txt"
else
mkdir "$BUILD_HOME/jenkins"
fi
# Create symlinks
mkdir -p "$BUILD_HOME/$REPO_ROOT_SUBDIR" "$BUILD_HOME/$WORKSPACE_ROOT_SUBDIR"
ln -sfn "$REPO_ROOT_SUBDIR" "$BUILD_HOME/repo"
ln -sfn "$WORKSPACE_ROOT_SUBDIR" "$BUILD_HOME/workspace"


@ -0,0 +1,35 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env BUILD_STREAM
require_env TIMESTAMP
load_build_env
$DRY_RUN && exit 0 || :
PUBLISH_DIR="$BUILD_OUTPUT_HOME/export"
notice "publishing $DOCKER_BASE_OS $BUILD_STREAM docker image lists"
src_dir="$STX_BUILD_HOME/workspace/std/build-images"
dst_dir="$PUBLISH_DIR/outputs/docker-images"
mkdir -p "$dst_dir"
declare -a find_args
or=
for os in $(echo $DOCKER_OS_LIST | sed 's/,/ /g') ; do
find_args+=(
$or
"-name" "images-$os-$BUILD_STREAM-versioned.lst" -o
"-name" "images-$os-$BUILD_STREAM-latest.lst"
)
or="-or"
done
if [[ ${#find_args[@]} -gt 0 ]] ; then
for src in $(find "$src_dir" -maxdepth 1 -type f \( "${find_args[@]}" \) ) ; do
cp -v "$src" "$dst_dir/"
done
fi

23
scripts/publish-helm-charts.sh Executable file

@ -0,0 +1,23 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
load_build_env
$DRY_RUN && bail "DRY_RUN not supported, bailing out" || :
src_dir="$BUILD_OUTPUT_HOME/$WORKSPACE_ROOT_SUBDIR/std/build-helm/stx"
dst_dir="$PUBLISH_DIR/outputs/helm-charts"
files="$(
if [[ -d "$src_dir" ]] ; then
find "$src_dir" -mindepth 1 -maxdepth 1 -xtype f -name "*.tgz" || exit 1
fi
)"
if [[ -n "$files" ]] ; then
notice "copying helm charts to $dst_dir"
mkdir -p "$dst_dir"
echo "$files" | xargs -r \cp --force --no-dereference --preserve=mode,timestamps,links -t "$dst_dir"
fi

59
scripts/publish-iso.sh Executable file

@ -0,0 +1,59 @@
#!/bin/bash
source $(dirname "$0")/../lib/job_utils.sh || exit 1
source $(dirname "$0")/../lib/publish_utils.sh || exit 1
load_build_env || exit 1
declare -a BUILD_TYPES=("std")
if $BUILD_RT ; then
BUILD_TYPES+=("rt")
fi
if $DRY_RUN ; then
bail "DRY_RUN=false is not supported, bailing out"
fi
TEMP_DIR="$BUILD_OUTPUT_HOME/tmp"
mkdir -p "$TEMP_DIR" || exit 1
checksum_files_list_file="$TEMP_DIR/published_iso_checksum_files"
find_checksum_files "${PUBLISH_SUBDIR}/outputs/std/iso" \
"${PUBLISH_SUBDIR}/outputs/rt/iso" \
"${PUBLISH_SUBDIR}/outputs/iso" \
>"$checksum_files_list_file" || exit 1
for build_type in "${BUILD_TYPES[@]}" ; do
dst_dir="${PUBLISH_DIR}/outputs/$build_type/iso"
checksum_file="$dst_dir/$CHECKSUMS_FILENAME"
regfile_list_file="$TEMP_DIR/iso_files_$build_type"
src_dir="$BUILD_OUTPUT_HOME/localdisk/lat/$build_type/deploy"
abs_src_dir="$(readlink -e "$src_dir")" || continue
rm -rf --one-file-system "$dst_dir" || exit 1
rm -f "$regile_list_file"
find "$src_dir" -xtype f -name 'starlingx*.iso' | sort | {
declare -a reg_files
while read iso_filename ; do
for filename in "$iso_filename" "${iso_filename%.iso}.sig" ; do
real_filename="$(readlink -e "$filename")" || continue
if ! in_list "$real_filename" "${reg_files[@]}" ; then
mkdir -p "$dst_dir" || exit 1
publish_file "$real_filename" "$dst_dir" "$checksum_files_list_file" >>"$checksum_file" || exit 1
reg_files+=("$real_filename")
fi
if [[ -L "$filename" ]] ; then
dst_link_target="$(basename "$real_filename")"
dst_link="$dst_dir/$(basename "$filename")"
ln -s -f -n "$dst_link_target" "$dst_link" || exit 1
echo "SYMLINK $dst_link" || exit 1
fi
done
done
}
check_pipe_status || exit 1
done

55
scripts/publish-logs.sh Executable file

@ -0,0 +1,55 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env JENKINS_API_USERPASS
load_build_env
PUBLISH_DIR="$BUILD_OUTPUT_HOME/export"
# Remove TTY codes from a log file
sanitize_log() {
# See https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_(Control_Sequence_Introducer)_sequences
sed -r -e $'s#\033\\[[0-9:;<=>?]*[ !"#$%&\047()*+,./-]*[]@A-Z\\^_`a-z{|}~[]##g' -e 's#\r$##g' -e 's#\r#\n#g'
}
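# e.g.: sanitize_log <raw-console.log >clean.log  (file names hypothetical)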
# This file contains JOB_NAME,BUILD_NUMBER,BUILD_URL,LABEL, one per line
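# e.g. (hypothetical entry):
#   parts/build-iso,42,https://jenkins.example.org/job/parts/job/build-iso/42/,std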
if [[ -f "$BUILD_HOME/jenkins/builds.txt" ]] ; then
log_dir="${PUBLISH_DIR}/logs"
grep -v -E -e '^\s*(#.*)?$' "$BUILD_HOME/jenkins/builds.txt" | {
FAILED=0
while IFS="," read job_name build_number build_url log_label ; do
if [[ -n "$job_name" && -n "${build_number}" ]] ; then
job_base_name="${job_name##*/}"
log_file="by-build-number/${job_base_name}-${build_number}.log.txt"
if [[ -n "$log_label" ]] ; then
log_link="${job_base_name}-${log_label}.log.txt"
else
log_link="${job_base_name}.log.txt"
fi
# download log from jenkins if it doesn't exist
if [[ -f "${log_dir}/${log_file}" ]] ; then
info "skipping ${log_dir}/${log_file} (file exists)"
continue
fi
info "downloading jenkins logs for $job_name #$build_number"
mkdir -p "${log_dir}"
mkdir -p "${log_dir}/by-build-number"
log_url="${build_url}/consoleText"
curl --fail --silent --show-error --location -u "$JENKINS_API_USERPASS" "${log_url}" \
| sanitize_log \
>"${log_dir}/${log_file}.tmp"
if ! check_pipe_status ; then
FAILED=1
rm -f "${log_dir}/${log_file}.tmp"
continue
fi
mv "${log_dir}/${log_file}.tmp" "${log_dir}/${log_file}"
ln -sfn "${log_file}" "${log_dir}/${log_link}"
fi
done
[[ $FAILED -eq 0 ]] || exit 1
}
fi

207
scripts/publish-packages.sh Executable file

@ -0,0 +1,207 @@
#!/bin/bash
#set -e
source $(dirname "$0")/../lib/job_utils.sh || exit 1
source $(dirname "$0")/../lib/publish_utils.sh || exit 1
require_env BUILD_RT || exit 1
load_build_env || exit 1
unset GREP_OPTIONS GREP_COLORS GREP_COLOR
DEB_REPO_ORIGIN="starlingx"
BUILD_TYPES=("std")
if $BUILD_RT ; then
BUILD_TYPES+=("rt")
fi
PACKAGE_OUTPUTS_ROOT="$BUILD_OUTPUT_HOME/$WORKSPACE_ROOT_SUBDIR"
TEMP_DIR="$BUILD_OUTPUT_HOME/tmp"
do_publish_package_sources_and_binaries() {
local src_dir="$1"
local sources_dst_root="$2"
local packages_dst_root="$3"
local checksums_filename="$4"
local published_checksum_files_list_file="$5"
local src_root
src_root="$(dirname "$src_dir")"
local subdir
subdir="$(basename "$src_dir")"
sources_dst_dir="$sources_dst_root/$subdir"
packages_dst_dir="$packages_dst_root"
mkdir -p "$sources_dst_dir" "$packages_dst_dir"
rm -f "$sources_dst_dir/$checksums_filename"
find "$src_root/$subdir" -mindepth 1 -maxdepth 1 \
-type f \
-not -name '*buildinfo' \
-not -name '*changes' \
-not -name '*build' \
-not -name '*log' \
| while read filename ; do
if [[ "$filename" =~ [.]u?deb$ ]] ; then
dst_dir="$packages_dst_dir"
else
dst_dir="$sources_dst_dir"
fi
publish_file "$filename" "$dst_dir" "$published_checksum_files_list_file" >>"$dst_dir/$checksums_filename" || exit 1
done
check_pipe_status || exit 1
}
publish_package_sources_and_binaries() {
local checksum_files_list_file="$TEMP_DIR/published_package_checksum_files"
# Find old checksums
find_checksum_files "${PUBLISH_SUBDIR}/outputs/std/packages" \
"${PUBLISH_SUBDIR}/outputs/rt/packages" \
>"$checksum_files_list_file" || exit 1
# copy/link package files
local build_type
for build_type in "${BUILD_TYPES[@]}" ; do
notice "publishing $build_type package files"
local output_root="$PACKAGE_OUTPUTS_ROOT/$build_type"
local sources_dst_root="$PUBLISH_DIR/outputs/$build_type/sources"
local packages_dst_dir="$PUBLISH_DIR/outputs/$build_type/packages"
local -a find_cmd=(
find "$output_root" -mindepth 1 -maxdepth 1 \
-type d \
-not -name stamp \
-not -name build-helm \
-not -name build-images \
-not -name build-wheels'*' \
)
if [[ -n "$PARALLEL" ]] ; then
(
export -f check_pipe_status publish_file do_publish_package_sources_and_binaries
"${find_cmd[@]}" | sort | $PARALLEL \
do_publish_package_sources_and_binaries '{}' "$sources_dst_root" "$packages_dst_dir" \
"$CHECKSUMS_FILENAME" "$checksum_files_list_file"
check_pipe_status || exit 1
)
check_pipe_status || exit 1
else
"${find_cmd[@]}" | sort | while read src_dir ; do
do_publish_package_sources_and_binaries "$src_dir" "$sources_dst_root" "$packages_dst_dir" \
"$CHECKSUMS_FILENAME" "$checksum_files_list_file" || exit 1
done
check_pipe_status || exit 1
fi
notice "creating meta data in $packages_dst_dir"
make_deb_repo --origin="$DEB_REPO_ORIGIN" "$packages_dst_dir" || exit 1
done
}
publish_3rdparty_binaries() {
local src_dir="$BUILD_OUTPUT_HOME/mirrors/starlingx/binaries"
local dst_dir="$PUBLISH_DIR/inputs/packages"
local checksum_files_list_file="$TEMP_DIR/published_3rdparty_binaries_checksum_files"
local checksum_file="$dst_dir/$CHECKSUMS_FILENAME"
[[ -d "$src_dir" ]] || return
notice "publishing 3rd-party binaries"
mkdir -p "$dst_dir"
rm -f "$checksum_file"
find_checksum_files "${PUBLISH_SUBDIR}/inputs/packages" >"$checksum_files_list_file" || exit 1
local -a find_cmd=(
find "$src_dir" -mindepth 1 -maxdepth 1 -type f \( -name '*.deb' -o -name '*.udeb' \)
)
if [[ -n "$PARALLEL" ]] ; then
(
export -f check_pipe_status publish_file do_publish_package_files
"${find_cmd[@]}" | sort | $PARALLEL \
publish_file '{}' "$dst_dir" "$checksum_files_list_file" >>"$checksum_file"
check_pipe_status || exit 1
)
check_pipe_status || exit 1
else
"${find_cmd[@]}" | sort | while read filename ; do
publish_file "$filename" "$dst_dir" "$checksum_files_list_file" >>$checksum_file || exit 1
done
check_pipe_status || exit 1
fi
notice "creating meta data in $dst_dir"
make_deb_repo --origin="$DEB_REPO_ORIGIN" "$dst_dir" || exit 1
}
publish_3rdparty_sources() {
local src_root_dir="$BUILD_OUTPUT_HOME/mirrors/starlingx/sources"
local dst_root_dir="$PUBLISH_DIR/inputs/sources"
local checksum_files_list_file="$TEMP_DIR/published_3rdparty_sources_checksum_files"
[[ -d "$src_root_dir" ]] || return
notice "publishing 3rd-party sources"
find_checksum_files "${PUBLISH_SUBDIR}/outputs/std/sources" \
"${PUBLISH_SUBDIR}/outputs/rt/sources" \
"${PUBLISH_SUBDIR}/inputs/sources" \
>"$checksum_files_list_file" || exit 1
local -a find_cmd=(
find "$src_root_dir" -mindepth 1 -maxdepth 1 -type d
)
if [[ -n "$PARALLEL" ]] ; then
(
export -f check_pipe_status publish_file do_publish_3rdparty_sources
"${find_cmd[@]}" | sort | $PARALLEL \
do_publish_3rdparty_sources \
'{}' "$dst_root_dir" "$checksum_files_list_file" "$CHECKSUMS_FILENAME"
check_pipe_status || exit 1
)
check_pipe_status || exit 1
else
"${find_cmd[@]}" | sort | while read src_dir ; do
do_publish_3rdparty_sources \
"$src_dir" "$dst_root_dir" "$checksum_files_list_file" "$CHECKSUMS_FILENAME"
done
check_pipe_status || exit 1
fi
}
do_publish_3rdparty_sources() {
local src_dir="$1"
local dst_root_dir="$2"
local checksum_files_list_file="$3"
local checksums_filename="$4"
local subdir
subdir="$(basename "$src_dir")" || exit 1
local dst_dir="$dst_root_dir/$subdir"
mkdir -p "$dst_dir" || exit 1
local checksum_file="$dst_dir/$checksums_filename"
rm -f "$checksum_file" || exit 1
find "$src_dir" -mindepth 1 -maxdepth 1 -type f | sort | (
while read filename ; do
#echo "filename=$filename" >&2
#echo "dst_root_dir=$dst_root_dir subdir=$subdir" >&2
publish_file "$filename" "$dst_dir" "$checksum_files_list_file" >>"$checksum_file" || exit 1
done
)
check_pipe_status || exit 1
}
if $DRY_RUN ; then
bail "DRY_RUN=false is not supported, bailing out"
fi
mkdir -p "$TEMP_DIR"
publish_3rdparty_sources
publish_3rdparty_binaries
publish_package_sources_and_binaries

35
scripts/publish-wheels.sh Executable file

@ -0,0 +1,35 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env TIMESTAMP
require_env BUILD_STREAM
load_build_env
$DRY_RUN && bail "DRY_RUN not supported, bailing out" || :
src_dir="$STX_BUILD_HOME/workspace/std/build-wheels-$DOCKER_BASE_OS-$BUILD_STREAM"
dst_dir="$PUBLISH_DIR/outputs/wheels"
declare -a wheels_files=(
"$src_dir/stx-$DOCKER_BASE_OS-$BUILD_STREAM-wheels.tar"
"$src_dir/stx-$DOCKER_BASE_OS-$BUILD_STREAM-wheels-py2.tar"
)
declare -a existing_wheels_files
for f in "${wheels_files[@]}" ; do
if [[ -f "$f" ]] ; then
existing_wheels_files+=("$f")
fi
done
if [[ "${#existing_wheels_files[@]}" -gt 0 ]] ; then
notice "publish wheels files to dst_dir"
for wheels_file in "${existing_wheels_files[@]}" ; do
[[ -f "$wheels_file" ]] || continue
\cp --force --no-dereference --preserve=mode,timestamps,links -t "$dst_dir" "$wheels_file"
done
fi

39
scripts/record-build-status.sh Executable file

@ -0,0 +1,39 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_STATUS
load_build_env
if $DRY_RUN ; then
bail "DRY_RUN=true, bailing out..."
fi
touch "$BUILD_OUTPUT_HOME/FAIL"
ARCHIVE_ROOT=$(dirname "$BUILD_OUTPUT_HOME")
if [[ "$BUILD_STATUS" == "success" ]] ; then
link_target=$(basename "$BUILD_OUTPUT_HOME")
cp "$BUILD_OUTPUT_HOME/LAST_COMMITS" "$ARCHIVE_ROOT/"
ln -sfn "$link_target" "$ARCHIVE_ROOT/latest_build"
if "$BUILD_DOCKER_IMAGES" ; then
cp "$BUILD_OUTPUT_HOME/LAST_COMMITS" "$ARCHIVE_ROOT/LAST_COMMITS_IMG_STABLE"
ln -sfn "$link_target" "$ARCHIVE_ROOT/latest_docker_image_build"
fi
rm -f "$BUILD_OUTPUT_HOME/FAIL"
touch "$BUILD_OUTPUT_HOME/SUCCESS"
mkdir -p "$PUBLISH_ROOT"
if ! same_path "$PUBLISH_ROOT" "$ARCHIVE_ROOT" ; then
link_target="${PUBLISH_ROOT}/$PUBLISH_TIMESTAMP"
if [[ -d "$link_target" ]] ; then
ln -sfn "$PUBLISH_TIMESTAMP" "$PUBLISH_ROOT/latest_build"
fi
fi
fi

61
scripts/sign-iso.sh Executable file

@ -0,0 +1,61 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env BUILD_ISO
require_env BUILD_RT
load_build_env
require_env SIGN_ISO
$SIGN_ISO || bail "SIGN_ISO=false, bailing out"
require_env SIGNING_SERVER
require_env SIGNING_USER
$BUILD_ISO || bail "BUILD_ISO=false, bailing out"
[[ -n "$SIGNING_SERVER" ]] || bail "SIGNING_SERVER is empoty, bailing out"
sign_iso() {
local iso_file="$1"
(
export MY_REPO=$REPO_ROOT/cgcs-root
export MY_WORKSPACE=$WORKSPACE_ROOT
export PATH=$MY_REPO/build-tools:$PATH:/usr/local/bin
sig_file="${iso_file%.iso}.sig"
maybe_run rm -f "$sig_file"
maybe_run sign_iso_formal.sh "$iso_file" || die "failed to sign ISO"
if ! $DRY_RUN ; then
[[ -f "$sig_file" ]] || die "failed to sign ISO"
info "created signature $sig_file"
fi
)
}
declare -a iso_files
iso_files+=($BUILD_HOME/localdisk/lat/std/deploy/starlingx-intel-x86-64-cd.iso)
if $BUILD_RT ; then
iso_files+=($BUILD_HOME/localdisk/lat/rt/deploy/starlingx-intel-x86-64-cd.iso)
fi
for iso_file in "${iso_files[@]}" ; do
if [[ -L "$iso_file" ]] ; then
iso_link_target="$(readlink "$iso_file")" || exit 1
[[ -n "$iso_link_target" ]] || die "failed to read symlink $iso_file"
[[ ! "$iso_link_target" =~ ^/ ]] || die "$iso_file: link target must not include slashes"
real_iso_file="$(dirname "$iso_file")/$iso_link_target"
sign_iso "$real_iso_file"
sig_file="${iso_file%.iso}.sig"
sig_link_target="${iso_link_target%.iso}.sig"
if ! $DRY_RUN ; then
ln -sfn "$sig_link_target" "$sig_file" || exit 1
info "created signature link $sig_file => $sig_link_target"
fi
else
sign_iso "$iso_file"
fi
done

40
scripts/start-containers.sh Executable file

@ -0,0 +1,40 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
require_env REBUILD_BUILDER_IMAGES
require_env USE_DOCKER_CACHE
set -x
load_build_env
# start containers
if $USE_DOCKER_CACHE ; then
cache_opts="--cache"
fi
if $REBUILD_BUILDER_IMAGES ; then
notice "rebuilding & starting containers"
./stx-init-env --rebuild $cache_opts
else
notice "rebuilding containers"
bash ./stx-init-env $cache_opts
fi
# wait for startup
notice "waiting for containers to startup ($BUILDER_POD_STARTUP_TIMEOUT seconds)"
let deadline="$(date '+%s')+$BUILDER_POD_STARTUP_TIMEOUT"
while [[ "$(stx control status | grep -i running | wc -l)" -lt 5 ]] ; do
if [[ "$(date '+%s')" -ge $deadline ]] ; then
die "pods didn't start up after $BUILDER_POD_STARTUP_TIMEOUT second(s)"
fi
sleep 10
done
stx control status
# finish setup
stx build prepare
# workaround for: https://bugs.launchpad.net/starlingx/+bug/1981094
stx shell -c 'sudo mkdir -p /var/cache/apt/archives/partial && sudo chmod +rx /var/cache/apt/archives/partial'

16
scripts/stop-containers.sh Executable file

@ -0,0 +1,16 @@
#!/bin/bash
set -e
source $(dirname "$0")/../lib/job_utils.sh
require_env BUILD_HOME
load_build_config
if [[ ! -f "$BUILD_HOME/$REPO_ROOT_SUBDIR/stx-tools/import-stx" ]] ; then
warn "$BUILD_HOME/$REPO_ROOT_SUBDIR/stx-tools/import-stx: file doesn't exist"
warn "Can't stop containers, bailing out"
exit 0
fi
load_build_env
stx control stop || true


@ -0,0 +1,149 @@
##################################################
# Build configuration
##################################################
# Build user
BUILD_USER="jenkins"
# User name & email
USER_NAME="Jenkins"
USER_EMAIL="davlet.panech@windriver.com"
# Branch name, used as part of build directories & artifacts
BUILD_BRANCH="master"
# Project name, must be unique for a given BUILD_USER
PROJECT_ID="dpanech-debian"
# Should be the same as PROJECT_ID, but must contain only lower-case letters,
# digits and dashes. It will be used as the k8s namespace and as part of the
# build tools helm chart & service IDs.
PROJECT="$(echo $PROJECT_ID | sed -r 's/[^a-zA-Z0-9-]+/-/g' | tr A-Z a-z)"
# Repo manifest to clone
MANIFEST_URL="https://opendev.org/starlingx/manifest"
MANIFEST_BRANCH="master"
MANIFEST="default.xml"
# File containing product release information, relative to repo root
RELEASE_INFO_FILE="cgcs-root/stx/utilities/utilities/build-info/release-info.inc"
# Alternatively, set version explicitly
#SW_VERSION="22.06"
# How many times to call "build-pkgs"
BUILD_PACKAGES_ITERATIONS=3
# Debian snapshot URLs. Leave them empty to use the defaults hard-coded
# in stx/tools
DEBIAN_SNAPSHOT_BASE="http://https://snapshot.debian.org/archive/debian"
DEBIAN_SECURITY_SNAPSHOT_BASE="https://snapshot.debian.org/archive/debian-security"
# ISO signing
SIGN_ISO=false # If false, don't sign the ISO
SIGNING_SERVER="some.host.org"
SIGNING_USER="some_user_id"
##################################################
# Build outputs
##################################################
# Archive artifacts in $BUILD_OUTPUT_ROOT/timestamp
BUILD_OUTPUT_ROOT="/localdisk/loadbuild/$BUILD_USER/$PROJECT_ID"
# Publish import artifacts to: $PUBLISH_ROOT/<PUBLISH_TIMESTAMP>/$PUBLISH_SUBDIR
# Create latest symlink in: $PUBLISH_ROOT/latest_build
# CENGN: publish to a location outside of the archive directory
#PUBLISH_ROOT="/export/mirrors/$PROJECT_ID/$MANIFEST_BRANCH/debian/monolithic"
#PUBLISH_ROOT_URL="http://$(hostname -f):8088${PUBLISH_ROOT}"
#PUBLISH_SUBDIR="" # may be empty
#PUBLISH_LATEST_LINK=true # create latest symlink?
# For private builds, publish to a subdirectory of the archive location
PUBLISH_ROOT="$BUILD_OUTPUT_ROOT"
PUBLISH_ROOT_URL="http://$(hostname -f):8088${PUBLISH_ROOT}"
PUBLISH_SUBDIR="export" # may be empty
PUBLISH_LATEST_LINK=false # create latest symlink?
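# e.g. with the settings above, a build publishes under
# $BUILD_OUTPUT_ROOT/<PUBLISH_TIMESTAMP>/export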
##################################################
# Docker configuration
##################################################
# How many times to try building docker images
DOCKER_BUILD_RETRY_COUNT=3
# Docker config file used for authentication when pushing images. If empty,
# defaults to $HOME/.docker/config.json. May be relative to $BUILD_HOME.
DOCKER_CONFIG_FILE="docker-config.json"
# Push docker images to this registry. Empty value means docker hub.
#DOCKER_REGISTRY="some.host.org:1234"
DOCKER_REGISTRY=
# Prefix docker image names with this namespace
#DOCKER_REGISTRY_ORG="starlingx"
DOCKER_REGISTRY_ORG="SOME_USER"
# Additional "REGISTRY/NAMESPACE" to push docker images to,
# may contain multiple elements, space-separated
DOCKER_EXTRA_REGISTRY_PREFIX_LIST=
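# e.g. (illustrative): DOCKER_EXTRA_REGISTRY_PREFIX_LIST="some.host.org:1234/starlingx"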
# Space separated list of docker registries that require authentication
# before pushing.
#
# Credentials must be in $DOCKER_CONFIG_FILE
#DOCKER_REGISTRY_PUSH_LOGIN_LIST="some.host.org:1234"
DOCKER_REGISTRY_PUSH_LOGIN_LIST=
# Disable SSL validation for these docker registries, space-separated
INSECURE_DOCKER_REGISTRIES="some.host.org:1234 some.other.host.org:1234"
# Use pod URLs in apt sources.list within the base image. This makes apt
# sources in docker images unusable outside of the builder pods.
USE_POD_URLS_IN_DOCKER_IMAGES="false"
# Docker base image OS
DOCKER_BASE_OS="debian"
# Docker image OS categories to build
DOCKER_OS_LIST="debian distroless"
# Base image for all docker images. If unset, we will build our own.
DOCKER_IMAGE_BASE=
##################################################
# Jenkins & K8S parameters
##################################################
# Must be <= k8s container network's MTU
CONTAINER_MTU="1410"
# "kubernetes" or "minikube"
K8S_PLATFORM="kubernetes"
# Create builder pods in this k8s namespace
K8S_NAMESPACE="$PROJECT_ID"
# Startup timeout for builder pods, in seconds
BUILDER_POD_STARTUP_TIMEOUT="300"
# Used by the stx tool (?)
BUILD_CPUS=4
# Configuration file for kubectl; if empty, defaults to $HOME/.kube/config
KUBECONFIG=/localdisk/designer/jenkins/dpanech-debian/kube-config
# Minikube memory (default: auto)
MINIKUBEMEMORY=
# Minikube home dir (default: $HOME), must not be NFS
MINIKUBE_HOME=
# A jenkins "credentials" record that contains the username/password for
# accessing Jenkins' REST API.
JENKINS_API_CREDENTIALS_ID="jenkins-api"
# GNU parallel - if not empty must be a command installed on host
# Makes some operations faster
PARALLEL_CMD="parallel"
PARALLEL_CMD_JOBS="12"

45
templates/source_me.sh.in Normal file
View File

@ -0,0 +1,45 @@
# bash
# vim: syn=sh:
# Define environment vars needed by the stx tool and jenkins scripts
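# Usage (a sketch, assuming this template was instantiated as $BUILD_HOME/source_me.sh):
#   cd $BUILD_HOME && source ./source_me.sh
#   BUILD_CONF=/path/to/build.conf source ./source_me.sh   # explicit config file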
# source build.conf
if [[ -z "$BUILD_CONF" ]] ; then
echo "reading $(dirname "${BASH_SOURCE[0]}")/build.conf" >&2
source "$(dirname "${BASH_SOURCE[0]}")/build.conf" || return 1
else
echo "reading $BUILD_CONF"
source "$BUILD_CONF" || return 1
fi
if [[ "$BUILD_USER" != "$(id -un)" ]] ; then
echo "This script can be used only by user $BUILD_USER!" >&2
# this file is meant to be sourced: "return" rather than "exit",
# so we don't terminate the caller's shell
return 1
fi
BUILD_HOME="$(readlink -f "$(dirname "${BASH_SOURCE[0]}")")"
# These are named differently in the stx tool
export STX_BUILD_HOME="$BUILD_HOME"
export STX_BUILD_CPUS="$BUILD_CPUS"
export STX_PLATFORM="$K8S_PLATFORM"
export STX_INSECURE_DOCKER_REGISTRIES="$INSECURE_DOCKER_REGISTRIES"
export STX_K8S_NAMESPACE="$K8S_NAMESPACE"
export STX_CONTAINER_MTU="$CONTAINER_MTU"
if [[ -z "$KUBECONFIG" ]] ; then
KUBECONFIG="$HOME/.kube/config"
fi
export KUBECONFIG
export PROJECT
# source import-stx if it exists
if [[ -f "$BUILD_HOME/repo/stx-tools/import-stx" ]] ; then
echo "cd $BUILD_HOME/repo/stx-tools" >&2
cd "$BUILD_HOME/repo/stx-tools" || return 1
echo "sourcing ./import-stx" >&2
source ./import-stx || return 1
else
echo "WARNING: Build tools environment script $BUILD_HOME/repo/stx-tools/import-stx doesn't exist" >&2
fi