CENGN compatibility + misc changes
* moved lib/ => scripts/lib/
* moved pipelines/vars => vars/
* lib/job_utils.sh: protect /home/{localdisk,loadbuild}
* lib/publish_utils.sh: don't copy xattrs when publishing
* pipelines/monolithic.Jenkinsfile: print BUILD_OUTPUT_HOME_URL & PUBLISH_URL at the end
* pipelines/parts/*.Jenkinsfile: add missing job parameters
* scripts/00_junk: deleted unused directory
* scripts/{build,archive}-helm-charts.sh: don't clobber helm charts output with multiple invocations
* scripts/build-docker*.sh: omit --registry if not defined
* scripts/docker-login.sh:
  - handle empty registry (ie docker.io)
  - fix $DOCKER_CONFIG_FILE being absolute
* scripts/print-config.sh:
  - also print PUBLISH_URL
* scripts/record-build-status.sh:
  - add missing function "same_path"

Signed-off-by: Davlet Panech <davlet.panech@windriver.com>
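A quick illustration of the "omit --registry if not defined" bullets above: in both build-docker scripts the registry flag is now appended to the argument array only when DOCKER_REGISTRY is non-empty, so an empty value falls through to the default registry (docker.io). A minimal sketch of that pattern; the command name and org value below are placeholders, not taken from the scripts:

#!/bin/bash
# Sketch only: append --registry only when DOCKER_REGISTRY is set.
DOCKER_REGISTRY=""                    # empty => default registry (docker.io)
DOCKER_REGISTRY_ORG="example-org"     # placeholder value
declare -a cmd=(
    "docker-build-tool"               # placeholder command name
    "--user=$DOCKER_REGISTRY_ORG"
)
if [[ -n "$DOCKER_REGISTRY" ]] ; then
    cmd+=("--registry=$DOCKER_REGISTRY")
fi
printf '%s\n' "${cmd[@]}"             # no --registry printed in this case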
parent 84ccb6d6c7
commit ee100b99a6

Changed paths:
  lib/
  pipelines/
    monolithic.Jenkinsfile
    parts/
      build-docker-base.Jenkinsfile
      build-docker-images.Jenkinsfile
      build-helm-charts.Jenkinsfile
      clone-source.Jenkinsfile
      create-changelog.Jenkinsfile
      publish-logs.Jenkinsfile
      stop-containers.Jenkinsfile
    vars/
  scripts/
    00_junk/
    archive-docker-images.sh
    archive-helm-charts.sh
    archive-iso.sh
    archive-misc.sh
    archive-packages.sh
    archive-prerequisites.sh
    archive-wheels.sh
    build-docker-base.sh
    build-docker-images.sh
    build-helm-charts.sh
    build-iso.sh
    build-packages.sh
    build-wheels.sh
    clean-build.sh
    clone-source.sh
    configure-build.sh
    create-changelog.sh
    docker-login.sh
    download-prerequisites.sh
    init-env.sh
    lib/
      build_utils.sh
      changelog_utils.sh
      glob_utils.sh
      job_utils.sh
      log_utils.sh
      publish_utils.sh
      retries.sh
      utils.sh
    print-config.sh
    publish-docker-images.sh
    publish-helm-charts.sh
    publish-iso.sh
    publish-logs.sh
    publish-packages.sh
    publish-wheels.sh
    record-build-status.sh
    sign-iso.sh
    start-containers.sh
    stop-containers.sh
    templates/
  vars/
@@ -1,141 +0,0 @@
#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

FD_SHA=0
FD_NAME=1
FD_INODE=2
FD_PATH=3

fu_debug () {
    >&2 echo "DEBUG: ${1}"
}

fu_error () {
    >&2 echo "ERROR: ${1}"
}

get_file_data_from_path () {
    local path="${1}"
    local sha=""
    sha="$(sha256sum "${path}" | cut -d ' ' -f 1; return ${PIPESTATUS[0]})"
    if [ $? -ne 0 ]; then
        return 1
    fi
    echo "$sha $(basename ${path}) $(stat --format=%i ${path}) ${path}"
}

get_file_data_from_dir () {
    local directory="${1}"
    local list_file="${2}"

    local d
    local line
    local fields

    for d in $(find $directory -type d | grep -v 'repodata'); do
        sha256sum $d/*.deb $d/*.rpm $d/*.tar $d/*.tgz $d/*.gz $d/*.bz2 $d/*.xz 2> /dev/null | \
        while read line; do
            fields=( $(echo $line) )
            echo "${fields[0]} $(basename ${fields[1]}) $(stat --format=%i ${fields[1]}) ${fields[1]}"
        done
    done > ${list_file}.unsorted
    sort ${list_file}.unsorted > ${list_file}
    \rm -f ${list_file}.unsorted
}

is_merge_candidate () {
    local array1=( ${1} )
    local array2=( ${2} )

    fu_debug "is_merge_candidate ${1}"
    fu_debug " vs ${2}"
    if [ "${array1[$FD_SHA]}" != "${array2[$FD_SHA]}" ]; then
        fu_debug "shas differ"
        return 1
    elif [ "${array1[$FD_NAME]}" != "${array2[$FD_NAME]}" ]; then
        fu_debug "names differ"
        return 1
    elif [ "${array1[$FD_INODE]}" = "${array2[$FD_INODE]}" ]; then
        fu_debug "inodes already the same"
        return 1
    elif [ "${array1[$FD_FPATH]}" = "${array2[$FD_PATH]}" ]; then
        fu_debug "paths already the same"
        return 1
    fi

    fu_debug "merge candidates:"
    fu_debug " ${array1[$FD_PATH]}"
    fu_debug " ${array2[$FD_PATH]}"

    return 0
}


cp_or_link () {
    local src_file="${1}"
    local dest_dir="${2}"
    shift 2
    local lst_files=( "${@}" )
    local lst_file
    local src_name
    local lnk_line
    local src_line
    local lnk_array=()
    local src_array=()

    if [ ! -d "${dest_dir}" ]; then
        fu_error "destination directory '${dest_dir}' not found"
        return 1
    fi

    src_name=$(basename ${src_file})
    src_line="$(get_file_data_from_path "${src_file}")" || return 1
    src_array=( ${src_line} )

    if [ -f "${dest_dir}/${src_name}" ]; then
        lnk_line="$(get_file_data_from_path "${dest_dir}/${src_name}")" || return 1
        lnk_array=( ${lnk_line} )
        # echo "src_line=${src_line}"
        # echo "lnk_line=${lnk_line}"
        if [ "${lnk_array[$FD_SHA]}" == "${src_array[$FD_SHA]}" ]; then
            echo "Already have ${src_name}"
            return 0
        fi
        fu_error "destination file '${dest_dir}/${src_name}' already exists"
        return 1
    fi


    for lst_file in "${lst_files[@]}"; do
        fu_debug "grep '${src_name}' in '${lst_file}'"
        grep "${src_name}" "${lst_file}" | \
        while read lnk_line; do
            if is_merge_candidate "$lnk_line" "$src_line" "${merge}" ; then
                lnk_array=( ${lnk_line} )
                fu_debug "ln ${lnk_array[$FD_PATH]} ${dest_dir}/${src_name}"
                \ln ${lnk_array[$FD_PATH]} ${dest_dir}/${src_name}
                if [ $? -ne 0 ]; then
                    fu_error "ln ${lnk_array[$FD_PATH]} ${dest_dir}/${src_name}"
                    return 0
                fi
                return 1
            fi
        done || return 0
    done

    fu_debug "cp $src_file ${dest_dir}/"
    \cp $src_file ${dest_dir}/
}

__make_deb_repo () {
    local root_dir="${1}"

    pushd "${root_dir}" || return 1
    # FIXME: Release file not valid
    dpkg-scanpackages . /dev/null > Release
    dpkg-scanpackages . /dev/null | gzip -9c > Packages.gz
    popd
}
@@ -18,6 +18,7 @@ def parseProps(text) {

def loadEnv() {
    def data = {}
    data.NEED_BUILD = false
    ws(params.BUILD_HOME) {
        if (fileExists ("NEED_BUILD")) {
            data.NEED_BUILD = true

@@ -26,6 +27,7 @@ def loadEnv() {
        final String configText = sh (script: "${Constants.SCRIPTS_DIR}/print-config.sh", returnStdout: true)
        final props = parseProps (configText)
        data.BUILD_OUTPUT_HOME_URL = props.BUILD_OUTPUT_HOME_URL
        data.PUBLISH_URL = props.PUBLISH_URL
    return data
}

@@ -46,13 +48,23 @@ def runPart (name, params = []) {

def printBuildFooter(final props) {
    if (props) {
        echo """
========================================

Build output: ${props.BUILD_OUTPUT_HOME_URL}

========================================
"""
        String msg = ""
        msg += "\n"
        msg += "========================================\n"
        msg += "\n"
        if (props.NEED_BUILD) {
            msg += "Build output: ${props.BUILD_OUTPUT_HOME_URL}\n"
            if (props.PUBLISH_URL) {
                msg += "Publish output: ${props.PUBLISH_URL}\n"
            }
        }
        else {
            echo "*** NO CHANGES - BUILD NOT REQUIRED"
        }
        msg += "\n"
        msg += "========================================\n"
        msg += "\n"
        echo (msg)
    }
}
@@ -22,6 +22,9 @@ pipeline {
        string (
            name: 'BUILD_HOME'
        )
        string (
            name: 'TIMESTAMP'
        )
        booleanParam (
            name: 'DRY_RUN'
        )

@@ -22,6 +22,9 @@ pipeline {
        string (
            name: 'BUILD_HOME'
        )
        string (
            name: 'TIMESTAMP'
        )
        booleanParam (
            name: 'DRY_RUN'
        )

@@ -22,6 +22,9 @@ pipeline {
        string (
            name: 'BUILD_HOME'
        )
        string (
            name: 'TIMESTAMP'
        )
        booleanParam (
            name: 'DRY_RUN'
        )

@@ -28,6 +28,9 @@ pipeline {
        string (
            name: 'PUBLISH_TIMESTAMP'
        )
        booleanParam (
            name: 'DRY_RUN'
        )
        string (
            name: 'REFRESH_SOURCE'
        )
@@ -38,19 +38,10 @@ pipeline {
            name: 'BUILD_DOCKER_IMAGES_STABLE'
        )
    }
    environment {
        PATH = "/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin"
        SCRIPTS_DIR = "${WORKSPACE}/v3/scripts"
        BUILD_HOME = "${BUILD_HOME}"
        TIMESTAMP = "${TIMESTAMP}"
        FORCE_BUILD = "${FORCE_BUILD}"
        BUILD_DOCKER_IMAGES_DEV = "${BUILD_DOCKER_IMAGES_DEV}"
        BUILD_DOCKER_IMAGES_STABLE = "${BUILD_DOCKER_IMAGES_STABLE}"
    }
    stages {
        stage ("create-changelog") {
            steps {
                sh ("${SCRIPTS_DIR}/create-changelog.sh")
                sh ("${Constants.SCRIPTS_DIR}/create-changelog.sh")
            }
        }
    }
@@ -46,11 +46,14 @@ pipeline {
                fi
            """
        );
        if (!jenkins_api_credentials_id) {
            error ("JENKINS_API_CREDENTIALS_ID is not defined in ${build_conf}")
        }
        withEnv (["BUILD_HOME=${params.BUILD_HOME}"]) {
            withCredentials ([usernameColonPassword (
                    credentialsId: jenkins_api_credentials_id,
                    variable: 'JENKINS_API_USERPASS')]) {
                sh "v3/scripts/publish-logs.sh"
                sh "${Constants.SCRIPTS_DIR}/publish-logs.sh"
            }
        }
    }
@@ -29,16 +29,10 @@ pipeline {
            name: 'PUBLISH_TIMESTAMP'
        )
    }
    environment {
        PATH = "/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin"
        SCRIPTS_DIR = "${WORKSPACE}/v3/scripts"
        BUILD_HOME = "${BUILD_HOME}"
        TIMESTAMP = "${TIMESTAMP}"
    }
    stages {
        stage ("stop-containers") {
            steps {
                sh ("bash ${SCRIPTS_DIR}/stop-containers.sh")
                sh ("${Constants.SCRIPTS_DIR}/stop-containers.sh")
            }
        }
    }
@@ -1,2 +0,0 @@
@groovy.transform.Field
def SCRIPTS_DIR = "${WORKSPACE}/v3/scripts"
@@ -1,90 +0,0 @@
#!/bin/bash

#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

set -e

source $(dirname "$0")/../lib/job_utils.sh

require_env BUILD_RT

load_build_env

if $DRY_RUN ; then
    bail "DRY_RUN=true not supported, bailing out"
fi

DISTRTO=debian
declare -a BUILD_TYPES=("std")
if $BUILD_RT ; then
    BUILD_TYPES+=("rt")
fi

make_deb_repo() {
    gen_deb_repo_meta_data $DRY_RUN_ARG "$@"
}

hardlink_or_copy_file() {
    local src_file="$1"
    local dst_file="$2"
    : <"$src_file" || exit 1
    rm -f "$dst_file"
    ln -n "$src_file" "$dst_file" || cp "$src_file" "$dst_file" || exit 1
}

# -----------------------

set -x

RETRIES=2
RETRY_INTERVAL_SEC=5

echo "PUBLISH: TIMESTAMP=${TIMESTAMP}"
echo "PUBLISH: BUILD_OUTPUT_HOME=${BUILD_OUTPUT_HOME}"
echo "PUBLISH: BUILD_HOME=${BUILD_HOME}"
echo "PUBLISH: DISTRO=${DISTRO}"
echo "PUBLISH: MANIFEST_BRANCH=${MANIFEST_BRANCH}"
export

source ${LIB_DIR}/retries.sh
source ${LIB_DIR}/file_utils.sh

function with_default_retries {
    local cmd=$1
    shift 1
    with_retries ${RETRIES:-1} ${RETRY_INTERVAL_SEC:-1} "${cmd}" "$@"
}

PUBLISH_OUTPUTS_DIR="${PUBLISH_DIR}/outputs"

ISO_OUTPUT="${BUILD_OUTPUT_HOME}/localdisk/deploy"
if [ -d "${ISO_OUTPUT}" ]; then
    PUBLISH_ISO_DIR="${PUBLISH_OUTPUTS_DIR}/iso"
    with_default_retries mkdir -p ${PUBLISH_ISO_DIR}
    for ISO in $(find ${ISO_OUTPUT} -name 'starlingx*.iso'); do
        B_NAME=$(basename "${ISO}")

        if [ -L "${ISO}" ] ; then
            src_iso="$(readlink -f "${ISO}")" || exit 1
        else
            src_iso="${ISO}"
        fi
        src_sig="${src_iso%.iso}.sig"
        cp_or_link "${src_iso}" "${PUBLISH_ISO_DIR}"
        if [[ -f "$src_sig" ]] ; then
            cp -f "${src_sig}" "${PUBLISH_ISO_DIR}"
        fi
        link_target="$(basename "${src_iso}")"
        if [ "${link_target}" != "${B_NAME}" ] ; then
            ln -s -f -n "${link_target}" "${PUBLISH_ISO_DIR}/${B_NAME}" || exit 1
            sig_link_target="${link_target%.iso}.sig"
            sig_link="${PUBLISH_ISO_DIR}/${B_NAME%.iso}.sig"
            ln -s -f -n "${sig_link_target}" "${sig_link}"
        fi
    done
fi
@@ -1,146 +0,0 @@
#!/bin/bash

#
# Copyright (c) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

set -e

source $(dirname "$0")/../lib/job_utils.sh

require_env BUILD_RT
declare_env DRY_RUN

load_build_env

BUILD_OUTPUT="$BUILD_OUTPUT_HOME"
PUBLISH_BRANCH_ROOT="$BUILD_OUTPUT_HOME/export"
declare -a BUILD_TYPES=("std")
if $BUILD_RT ; then
    BUILD_TYPES+=("rt")
fi

if $DRY_RUN ; then
    echo "DRY_RUN=true not supported, bailing out"
    exit 0
fi

make_deb_repo() {
    gen_deb_repo_meta_data $DRY_RUN_ARG "$@"
}

# -----------------------

#set -x

RETRIES=2
RETRY_INTERVAL_SEC=5

CHECKSUM_FN=stx-checksums

source ${LIB_DIR}/retries.sh
source ${LIB_DIR}/file_utils.sh

function with_default_retries {
    local cmd=$1
    shift 1
    with_retries ${RETRIES:-1} ${RETRY_INTERVAL_SEC:-1} "${cmd}" "$@"
}

PUBLISH_INPUTS_DIR="${PUBLISH_DIR}/inputs"
PUBLISH_OUTPUTS_DIR="${PUBLISH_DIR}/outputs"

echo "PUBLISH: PUBLISH_ROOT=${PUBLISH_ROOT}"
echo "PUBLISH: PUBLISH_INPUTS_DIR=${PUBLISH_INPUTS_DIR}"
echo "PUBLISH: PUBLISH_OUTPUTS_DIR=${PUBLISH_OUTPUTS_DIR}"

# Search for checksum files
# $PUBLISH_ROOT/<any layers>/<any timestamps>/$PUBLISH_SUBDIR
CHECKSUM_FILES=$(
    if [[ -d "${PUBLISH_ROOT}" ]] ; then
        { # timestamp dirs
            find "$PUBLISH_ROOT" -regextype posix-extended -mindepth 1 -maxdepth 1 -type d -regex '.*/[0-9]{4}.*$'
        } | { # publish subdir
            while read dir ; do
                if [[ -n "$PUBLISH_SUBDIR" && -d "$dir/$PUBLISH_SUBDIR" ]] ; then
                    echo "$dir/$PUBLISH_SUBDIR"
                fi
            done
        } | { # checksums
            xargs -r -i find '{}' -type f -name "${CHECKSUM_FN}"
        }
    fi
)

PKGS_INPUT="${BUILD_OUTPUT}/mirrors/starlingx/binaries"
if [ -d "${PKGS_INPUT}" ]; then
    PUBLISH_INPUTS_PKG_DIR="${PUBLISH_INPUTS_DIR}/packages"
    with_default_retries mkdir -p ${PUBLISH_INPUTS_PKG_DIR}
    for PKG in $(find ${PKGS_INPUT} -name '*.deb'); do
        with_default_retries cp_or_link "${PKG}" "${PUBLISH_INPUTS_PKG_DIR}" $CHECKSUM_FILES
    done
    get_file_data_from_dir "${PUBLISH_INPUTS_PKG_DIR}" "${PUBLISH_INPUTS_PKG_DIR}/${CHECKSUM_FN}"
    CHECKSUM_FILES+=" ${PUBLISH_INPUTS_PKG_DIR}/${CHECKSUM_FN}"
    make_deb_repo "${PUBLISH_INPUTS_PKG_DIR}"
fi

SRCS_INPUT="${BUILD_OUTPUT}/mirrors/starlingx/sources"
echo "SRCS_INPUT=$SRCS_INPUT"
if [ -d "${SRCS_INPUT}" ]; then
    PUBLISH_INPUTS_SRC_DIR="${PUBLISH_INPUTS_DIR}/sources"
    echo "PUBLISH_INPUTS_SRC_DIR=$PUBLISH_INPUTS_SRC_DIR"
    for PKG_SRC_INPUT in $(find "${SRCS_INPUT}" -maxdepth 1 -type d) ; do
        PUBLISH_INPUT_SRC_PKG_DIR="${PUBLISH_INPUTS_SRC_DIR}/$(basename "${PKG_SRC_INPUT}")"
        for f in $(find ${PKG_SRC_INPUT} -maxdepth 1 -type f ); do
            with_default_retries mkdir -p ${PUBLISH_INPUT_SRC_PKG_DIR}
            with_default_retries cp_or_link "${f}" "${PUBLISH_INPUT_SRC_PKG_DIR}" $CHECKSUM_FILES
        done
    done
    if [ -d "${PUBLISH_INPUTS_SRC_DIR}" ]; then
        get_file_data_from_dir "${PUBLISH_INPUTS_SRC_DIR}" "${PUBLISH_INPUTS_SRC_DIR}/${CHECKSUM_FN}"
        CHECKSUM_FILES+=" ${PUBLISH_INPUTS_SRC_DIR}/${CHECKSUM_FN}"
    fi
fi

for BT in "${BUILD_TYPES[@]}" ; do
    BT_OUTPUT="${BUILD_OUTPUT}/localdisk/loadbuild/jenkins/${PROJECT}/${BT}"
    if [ -d "${BT_OUTPUT}" ]; then
        PUBLISH_OUTPUTS_SRC_DIR="${PUBLISH_OUTPUTS_DIR}/${BT}/sources"
        PUBLISH_OUTPUTS_PKG_DIR="${PUBLISH_OUTPUTS_DIR}/${BT}/packages"
        for PKG_OUTPUT in $(find "${BT_OUTPUT}" -maxdepth 1 -type d) ; do
            echo "PKG_OUTPUT=${PKG_OUTPUT}"

            if [ $(find "${PKG_OUTPUT}" -maxdepth 1 -type f -name '*.dsc' | wc -l) -ne 0 ]; then
                PUBLISH_OUTPUTS_SRC_PKG_DIR="${PUBLISH_OUTPUTS_SRC_DIR}/$(basename "${PKG_OUTPUT}")"
                with_default_retries mkdir -p "${PUBLISH_OUTPUTS_SRC_PKG_DIR}"
                for f in $(find ${PKG_OUTPUT} -maxdepth 1 -type f -not -name '*deb' \
                        -and -not -name '*buildinfo' \
                        -and -not -name '*changes' \
                        -and -not -name '*build' \
                        -and -not -name '*log' ); do
                    with_default_retries cp_or_link "${f}" "${PUBLISH_OUTPUTS_SRC_PKG_DIR}" $CHECKSUM_FILES
                done
            fi

            if [ $(find "${PKG_OUTPUT}" -maxdepth 1 -type f -name '*.deb' | wc -l) -ne 0 ]; then
                with_default_retries mkdir -p "${PUBLISH_OUTPUTS_PKG_DIR}"
                for f in $(find ${PKG_OUTPUT} -maxdepth 1 -type f -name '*deb' ); do
                    with_default_retries cp_or_link "${f}" "${PUBLISH_OUTPUTS_PKG_DIR}" $CHECKSUM_FILES
                done
            fi
        done

        if [ -d "${PUBLISH_OUTPUTS_SRC_DIR}" ]; then
            get_file_data_from_dir "${PUBLISH_OUTPUTS_SRC_DIR}" "${PUBLISH_OUTPUTS_SRC_DIR}/${CHECKSUM_FN}"
            CHECKSUM_FILES+=" ${PUBLISH_OUTPUTS_SRC_DIR}/${CHECKSUM_FN}"
        fi

        if [ -d "${PUBLISH_OUTPUTS_PKG_DIR}" ]; then
            get_file_data_from_dir "${PUBLISH_OUTPUTS_PKGS_ROOT}" "${PUBLISH_OUTPUTS_PKG_DIR}/${CHECKSUM_FN}"
            CHECKSUM_FILES+=" ${PUBLISH_OUTPUTS_PKG_DIR}/${CHECKSUM_FN}"
            make_deb_repo "${PUBLISH_OUTPUTS_PKG_DIR}"
        fi
    fi
done
@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_STREAM
@@ -1,17 +1,29 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

load_build_env

#VERBOSE_ARG="--verbose"

if [[ -d "$BUILD_HOME/workspace/std/build-helm" ]] ; then
    mkdir -p "$BUILD_OUTPUT_HOME"
dir_is_empty() {
    if [[ -d "$1" ]] ; then
        [[ $(find "$1" -mindepth 1 -maxdepth 1 -print -quit | wc -l) -le 0 ]]
    else
        return 0
    fi
}

if ! dir_is_empty "$BUILD_HOME/workspace/helm-charts" ; then
    my_user="$(id -u)"
    my_group="$(id -g)"
    safe_copy_dir $DRY_RUN_ARG $VERBOSE_ARG --chown $my_user:$my_group \
        "$BUILD_HOME/workspace/std/build-helm" \
        "$BUILD_OUTPUT_HOME/workspace/std/"
    if [[ ! -d "$BUILD_OUTPUT_HOME/workspace/helm-charts" ]] ; then
        mkdir "$BUILD_OUTPUT_HOME/workspace/helm-charts"
    fi
    safe_copy_dir $DRY_RUN_ARG $VERBOSE_ARG --delete --chown $my_user:$my_group \
        "$BUILD_HOME/workspace/helm-charts/" \
        "$BUILD_OUTPUT_HOME/workspace/helm-charts/"

    notice "Helm charts archived in $BUILD_OUTPUT_HOME/workspace/helm-charts"
fi
@@ -1,9 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh

require_env BUILD_RT
source $(dirname "$0")/lib/job_utils.sh

load_build_env

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

load_build_env

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_RT

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_RT

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_STREAM

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_HOME
require_env DRY_RUN
@@ -26,12 +26,15 @@ declare -a cmd=(
    "--version=$base_image_tag"
    "--attempts=$DOCKER_BUILD_RETRY_COUNT"
    "--stream=$BUILD_STREAM"
    "--registry=$DOCKER_REGISTRY"
    "--user=$DOCKER_REGISTRY_ORG"
    "--latest"
    "--latest-tag=$base_image_latest_tag"
)

if [[ -n "$DOCKER_REGISTRY" ]] ; then
    cmd+=("--registry=$DOCKER_REGISTRY")
fi

if [[ "$USE_DOCKER_CACHE" == true ]] ; then
    cmd+=("--cache")
fi
@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_HOME
require_env DRY_RUN

@@ -22,7 +22,10 @@ if [[ -n "$DOCKER_IMAGE_BASE" ]] ; then
    base_img="$DOCKER_IMAGE_BASE"
else
    base_image_tag="$BUILD_BRANCH-$BUILD_STREAM-$TIMESTAMP"
    base_img="$DOCKER_REGISTRY/$DOCKER_REGISTRY_ORG/stx-$DOCKER_BASE_OS:$base_image_tag"
    base_img="$DOCKER_REGISTRY_ORG/stx-$DOCKER_BASE_OS:$base_image_tag"
    if [[ -n "$DOCKER_REGISTRY" ]] ; then
        base_img="$DOCKER_REGISTRY/$base_img"
    fi
fi

declare -a cmd=(

@@ -33,11 +36,14 @@ declare -a cmd=(
    "--no-pull-base"
    "--version=$TIMESTAMP"
    "--prefix=$BUILD_BRANCH"
    "--registry=$DOCKER_REGISTRY"
    "--user=$DOCKER_REGISTRY_ORG"
    "--latest"
)

if [[ -n "$DOCKER_REGISTRY" ]] ; then
    cmd+=("--registry=$DOCKER_REGISTRY")
fi

if [[ -f "$WORKSPACE_ROOT/$wheels_file" ]] ; then
    cmd+=("--wheels=\$MY_WORKSPACE/$wheels_file")
fi
@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_HOME
require_env DRY_RUN

@@ -39,8 +39,16 @@ build_helm_charts() {
    stx_docker_cmd $DRY_RUN_ARG "set -e ; cd \"\$MY_REPO/build-tools\" ; export PATH=\"\$PWD:\$PATH\" ; $cmd"
}

copy_dir() {
    find "$1" -mindepth 1 -maxdepth 1 -exec cp -f -alr -t "$2" '{}' '+'
}

# call build-helm-charts.sh in container for each stream/tag
if [[ "${#image_dirs[@]}" -gt 0 ]] ; then
    output_dir="$BUILD_HOME/workspace/helm-charts"
    if [[ -d "$output_dir" ]] ; then
        rm -rf --one-file-system "$output_dir" || exit 1
    fi
    for build_stream in $BUILD_STREAMS ; do
        for build_tag in $BUILD_TAGS ; do
            for os in $DOCKER_BASE_OS ; do

@@ -63,6 +71,8 @@ if [[ "${#image_dirs[@]}" -gt 0 ]] ; then
                    continue
                fi

                tmp_output_dir=$BUILD_HOME/workspace/std/build-helm

                for app in ${HELM_CHART_APPS:-NONE} ; do
                    cmd="build-helm-charts.sh"
                    cmd+=" --verbose"

@@ -75,8 +85,15 @@ if [[ "${#image_dirs[@]}" -gt 0 ]] ; then
                    cmd+=" | tee \"\$MY_WORKSPACE/helm-${label}.log\""
                    cmd+=" ; [[ \${PIPESTATUS[0]} -eq 0 ]]"
                    build_helm_charts "$cmd" || exit 1
                    if [[ -d "$tmp_output_dir" ]] ; then
                        mkdir -p "$output_dir" || exit 1
                        copy_dir "$tmp_output_dir" "$output_dir" || exit 1
                    fi
                done
            done
        done
    done
    if [[ -d "$output_dir" ]] ; then
        notice "helm charts created in $output_dir"
    fi
fi
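The copy_dir helper above is what implements the "don't clobber helm charts output" bullet: each per-stream/per-tag run leaves its result in the temporary std/build-helm directory, and copy_dir hard-links (cp -al) the top-level entries into the persistent workspace/helm-charts directory, so successive invocations accumulate instead of overwriting. A small self-contained sketch of that pattern; the paths are illustrative only, not the real build layout:

#!/bin/bash
# Sketch only: accumulate results across runs via hard links instead of
# wiping the destination each time.
copy_dir() {
    find "$1" -mindepth 1 -maxdepth 1 -exec cp -f -alr -t "$2" '{}' '+'
}
tmp=$(mktemp -d)    # stands in for .../std/build-helm
out=$(mktemp -d)    # stands in for .../workspace/helm-charts
touch "$tmp/chart-a.tgz"
copy_dir "$tmp" "$out"      # first stream/tag invocation
touch "$tmp/chart-b.tgz"
copy_dir "$tmp" "$out"      # later invocation adds, does not clobber
ls "$out"                   # chart-a.tgz  chart-b.tgz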
@@ -1,11 +1,10 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_HOME
require_env BUILD_ISO
require_env BUILD_RT

load_build_env

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh
require_env BUILD_HOME

require_env BUILD_PACKAGES

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh
require_env BUILD_HOME
require_env USE_DOCKER_CACHE
require_env DRY_RUN

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_HOME
require_env CLEAN_PACKAGES

@@ -1,8 +1,8 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/../lib/retries.sh
source $(dirname "$0")/lib/job_utils.sh
source $(dirname "$0")/lib/retries.sh

require_env BUILD_HOME
require_env DRY_RUN

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

set -x
load_build_env

@@ -1,8 +1,8 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/../lib/changelog_utils.sh
source $(dirname "$0")/lib/job_utils.sh
source $(dirname "$0")/lib/changelog_utils.sh

require_env BUILD_HOME
require_env FORCE_BUILD

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_HOME
require_env PUSH_DOCKER_IMAGES
@@ -24,7 +24,7 @@ for login_spec in $DOCKER_REGISTRY_PUSH_LOGIN_LIST ; do
    read login_reg dummy <<<$(parse_docker_registry "$login_spec")
    # check if we intend to push to it
    declare spec reg
    for spec in $DOCKER_REGISTRY $DOCKER_EXTRA_REGISTRY_PREFIX_LIST ; do
    for spec in ${DOCKER_REGISTRY:-docker.io} $DOCKER_EXTRA_REGISTRY_PREFIX_LIST ; do
        read reg dummy <<<$(parse_docker_registry "$spec")
        if [[ "$reg" == "$login_reg" && -z "${login_repos_hash[$reg]}" ]] ; then
            login_repos_hash["$reg"]=1

@@ -52,7 +52,7 @@ unset login_repos_hash
#
if [[ -z "$DOCKER_CONFIG_FILE" ]] ; then
    DOCKER_CONFIG_FILE=~/.docker/config.json
elif [[ ! $DOCKER_CONFIG =~ ^/ ]] ; then
elif [[ ! $DOCKER_CONFIG_FILE =~ ^/ ]] ; then
    DOCKER_CONFIG_FILE="$BUILD_HOME/$DOCKER_CONFIG_FILE"
fi
require_file "$DOCKER_CONFIG_FILE"
@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_RT

@@ -1,7 +1,10 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
SCRIPTS_DIR="$(dirname "$0")"
source "$SCRIPTS_DIR/lib/job_utils.sh"

require_env BUILD_HOME

if [[ -d "$BUILD_HOME" ]] ; then
    info "creating $BUILD_HOME"

@@ -11,7 +14,7 @@ fi
if [[ ! -f "$BUILD_HOME/build.conf" ]] ; then
    info "$BUILD_HOME/build.conf: file not found"
    info "creating $BUILD_HOME/build.conf.example"
    cp "$TOP_SCRIPTS_DIR/templates/build.conf.example.in" "$BUILD_HOME/build.conf.example"
    cp "$SCRIPTS_DIR/templates/build.conf.example.in" "$BUILD_HOME/build.conf.example"
    info "Please use the example file as the starting point"
    exit 1
fi

@@ -29,7 +32,7 @@ done

# Install source_me.sh to $BUILD_HOME
info "creating $BUILD_HOME/source_me.sh"
cp "$TOP_SCRIPTS_DIR/templates/source_me.sh.in" "$BUILD_HOME/source_me.sh"
cp "$SCRIPTS_DIR/templates/source_me.sh.in" "$BUILD_HOME/source_me.sh"

# Delete old jenkins job list
if [[ -d "$BUILD_HOME/jenkins" ]] ; then
@@ -1,5 +1,5 @@
: ${LOADBUILD_ROOT:="/localdisk/loadbuild"}
: ${DESIGNER_ROOT:="/localdisk/designer"}
: ${LOADBUILD_ROOTS:="/localdisk/loadbuild:/home/localdisk/loadbuild"}
: ${DESIGNER_ROOTS:="/localdisk/designer:/home/localdisk/designer"}

source "${BASH_SOURCE[0]%/*}"/utils.sh || return 1
source "${BASH_SOURCE[0]%/*}"/log_utils.sh || return 1

@@ -254,10 +254,11 @@ __get_protected_dirs() {
    [[ -n "$USER" ]] || die "USER not set"
    [[ -n "$PROJECT" ]] || die "PROJECT not set"

    echo "$DESIGNER_ROOT:ro"
    echo "$LOADBUILD_ROOT:ro"
    echo "$DESIGNER_ROOT/$USER/$PROJECT"
    echo "$LOADBUILD_ROOT/$USER/$PROJECT"
    local dir
    for dir in $(echo "$DESIGNER_ROOTS" "$LOADBUILD_ROOTS" | sed 's/:/ /g') ; do
        echo "$dir:ro"
        echo "$dir/$USER/$PROJECT"
    done
}

#
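The hunk above replaces the single DESIGNER_ROOT/LOADBUILD_ROOT pair with colon-separated *_ROOTS lists, which is how the /home/{localdisk,loadbuild} trees become protected as well. A standalone sketch of what the new loop emits; the USER and PROJECT values here are made up for illustration:

#!/bin/bash
# Sketch only: reproduce the __get_protected_dirs loop with the new defaults.
DESIGNER_ROOTS="/localdisk/designer:/home/localdisk/designer"
LOADBUILD_ROOTS="/localdisk/loadbuild:/home/localdisk/loadbuild"
USER=jenkins PROJECT=stx    # placeholder values
for dir in $(echo "$DESIGNER_ROOTS" "$LOADBUILD_ROOTS" | sed 's/:/ /g') ; do
    echo "$dir:ro"              # each root, flagged read-only
    echo "$dir/$USER/$PROJECT"  # plus the per-user, per-project subdirectory
done
# Emits, for example:
#   /localdisk/designer:ro
#   /localdisk/designer/jenkins/stx
#   /home/localdisk/designer:ro
#   /home/localdisk/designer/jenkins/stx
#   ...and the same pair for each loadbuild root.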
@@ -71,7 +71,7 @@ publish_file() {

    # if all else fails, copy it
    if [[ "$link_created" != "link_created" ]] ; then
        \cp -f --preserve=mode,timestamps,xattr "$filename" "$dst_file" || exit 1
        \cp -f --preserve=mode,timestamps "$filename" "$dst_file" || exit 1
        echo "COPY $dst_file" >&2
    fi
@@ -1,8 +1,9 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

load_build_env

echo "BUILD_OUTPUT_HOME_URL=$BUILD_OUTPUT_HOME_URL"
echo "PUBLISH_URL=$PUBLISH_URL"

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_HOME
require_env BUILD_STREAM

@@ -1,13 +1,13 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

load_build_env

$DRY_RUN && bail "DRY_RUN not supported, bailing out" || :

src_dir="$BUILD_OUTPUT_HOME/$WORKSPACE_ROOT_SUBDIR/std/build-helm/stx"
src_dir="$BUILD_OUTPUT_HOME/$WORKSPACE_ROOT_SUBDIR/helm-charts/stx"
dst_dir="$PUBLISH_DIR/outputs/helm-charts"

files="$(

@@ -1,7 +1,7 @@
#!/bin/bash

source $(dirname "$0")/../lib/job_utils.sh || exit 1
source $(dirname "$0")/../lib/publish_utils.sh || exit 1
source $(dirname "$0")/lib/job_utils.sh || exit 1
source $(dirname "$0")/lib/publish_utils.sh || exit 1

load_build_env || exit 1

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env JENKINS_API_USERPASS

@@ -2,8 +2,8 @@

#set -e

source $(dirname "$0")/../lib/job_utils.sh || exit 1
source $(dirname "$0")/../lib/publish_utils.sh || exit 1
source $(dirname "$0")/lib/job_utils.sh || exit 1
source $(dirname "$0")/lib/publish_utils.sh || exit 1

require_env BUILD_RT || exit 1
require_env BUILD_ISO || exit 1

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_HOME
require_env TIMESTAMP

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_STATUS
@@ -15,6 +15,16 @@ touch "$BUILD_OUTPUT_HOME/FAIL"

ARCHIVE_ROOT=$(dirname "$BUILD_OUTPUT_HOME")

same_path() {
    if [[ ! -e "$1" && ! -e "$2" ]] ; then
        return 1
    fi
    local a b
    a="$(readlink -f "$1")" || exit 1
    b="$(readlink -f "$2")" || exit 1
    [[ "$a" == "$b" ]]
}

if [[ "$BUILD_STATUS" == "success" ]] ; then
    ARCHIVE_ROOT=$(dirname "$BUILD_OUTPUT_HOME")
    link_target=$(basename "$BUILD_OUTPUT_HOME")
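same_path above treats two paths as equal only when at least one of them exists and both resolve, via readlink -f, to the same target, which makes it a safe test before re-pointing a symlink at the current build output. A hedged usage sketch; the latest_build link name and the placeholder BUILD_OUTPUT_HOME value are invented for illustration, since the hunk is cut off before the real caller:

#!/bin/bash
# Sketch only: re-point a convenience symlink when it does not already
# resolve to the current build output directory.
same_path() {
    if [[ ! -e "$1" && ! -e "$2" ]] ; then
        return 1
    fi
    local a b
    a="$(readlink -f "$1")" || exit 1
    b="$(readlink -f "$2")" || exit 1
    [[ "$a" == "$b" ]]
}
BUILD_OUTPUT_HOME=${BUILD_OUTPUT_HOME:-/tmp/builds/20220101T000000Z}  # placeholder
ARCHIVE_ROOT=$(dirname "$BUILD_OUTPUT_HOME")
if ! same_path "$ARCHIVE_ROOT/latest_build" "$BUILD_OUTPUT_HOME" ; then
    ln -sfn "$(basename "$BUILD_OUTPUT_HOME")" "$ARCHIVE_ROOT/latest_build"
fi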
@@ -1,11 +1,10 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_HOME
require_env BUILD_ISO
require_env BUILD_RT

load_build_env

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_HOME
require_env REBUILD_BUILDER_IMAGES

@@ -1,7 +1,7 @@
#!/bin/bash

set -e
source $(dirname "$0")/../lib/job_utils.sh
source $(dirname "$0")/lib/job_utils.sh

require_env BUILD_HOME
load_build_config
vars/Constants.groovy (new file, 2 lines)

@@ -0,0 +1,2 @@
@groovy.transform.Field
def SCRIPTS_DIR = "${WORKSPACE}/scripts"