Add jenkins pipeline for patch creation

This change adds support for creating a patch on top of a specified build
state using Jenkins:
- patch-monolithic: Main job where all the stages are listed and called.
- ostree-pull: Part where we pull the ostree from the remote build home.
- patch-make: Part where "patch-builder" is executed and the patch gets
    created.
- patch-iso: Part where the "create-prepatched-iso" is executed and the
    pre-patched ISO is created.
- publish-patch: Part where the publish directory for the pre-patched
    ISO and patch is created.
We also update the build-packages part to support the "reuse_maximum"
option when running build-pkgs.

Test plan:
    PASS: Run full monolithic pipeline
    PASS: Run full patch-monolithic pipeline

Story: 2010226
Task: 50700

Change-Id: I7c7d688f2c568532a0b23844dcfd81349ca96476
Signed-off-by: Dostoievski Batista <dostoievski.albinobatista@windriver.com>
This commit is contained in:
Dostoievski Batista 2024-07-31 13:09:11 -03:00
parent 16988a0dbf
commit 0eec407738
12 changed files with 856 additions and 4 deletions

View File

@ -58,6 +58,18 @@ pipeline {
booleanParam (
name: 'BUILD_ISO'
)
string(
name: 'STX_SHARED_SOURCE'
)
string(
name: 'STX_SHARED_REPO'
)
booleanParam(
name: 'PATCH_BUILD',
defaultValue: false,
description: "Identify if we are building a patch." +
"As we use the same sub-jobs than the usual build we need to know when we are creating a patch."
)
}
stages {

View File

@ -0,0 +1,59 @@
// vim: syn=groovy
//
// Copyright (c) 2024 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
// Part job "ostree-pull": pulls the base ostree repository from a remote
// build home by delegating to scripts/ostree-pull.sh. Triggered by the
// patch-monolithic pipeline via runPart("ostree-pull").
// Load the shared pipeline library pinned to the requested branch/commit
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
// Shared-library step -- presumably sets this build's description from the
// master job info; see the "common" library for details
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
// Parameters are forwarded from the master job (via copyCurrentParams)
parameters {
// Name of the master job that triggered this part job
string(
name: 'MASTER_JOB_NAME'
)
// Build number of the master job
string(
name: 'MASTER_BUILD_NUMBER'
)
// Branch (or commit) of the Jenkins scripts repository to use
string(
name: 'JENKINS_SCRIPTS_BRANCH'
)
// Full path to the local build root
string(
name: 'BUILD_HOME'
)
// Server holding the base build, e.g. http://example-build-server.com:8080
string(
name: 'REMOTE_SERVER'
)
// Path on REMOTE_SERVER of the base build home that contains the ostree repo
string(
name: 'REMOTE_BUILD_HOME'
)
string(
name: 'TIMESTAMP'
)
// Enable bash xtrace in the called script
booleanParam(
name: 'SHELL_XTRACE'
)
booleanParam(
name: 'DRY_RUN'
)
}
stages {
stage("ostree-pull") {
steps {
// All real work happens in the shell script
sh ("bash ${Constants.SCRIPTS_DIR}/ostree-pull.sh")
}
}
}
post {
cleanup {
// Shared-library step: common part-job cleanup
cleanupPartJob()
}
}
}

View File

@ -0,0 +1,68 @@
// vim: syn=groovy
//
// Copyright (c) 2024 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//

// Part job "patch-iso": builds the pre-patched ISO by delegating to
// scripts/patch-iso.sh. Triggered by the patch-monolithic pipeline via
// runPart("patch-iso").

library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()

pipeline {
    agent any
    options {
        timestamps()
    }
    // Parameters are forwarded from the master job (via copyCurrentParams)
    parameters {
        // Name / build number of the master job that triggered this part
        string(
            name: 'MASTER_JOB_NAME'
        )
        string(
            name: 'MASTER_BUILD_NUMBER'
        )
        // Branch (or commit) of the Jenkins scripts repository to use
        string(
            name: 'JENKINS_SCRIPTS_BRANCH'
        )
        // Full path to the local build root
        // BUGFIX: this parameter was declared twice; the duplicate
        // declaration has been removed.
        string(
            name: 'BUILD_HOME'
        )
        // Software version and patch number; patch-iso.sh names the output
        // prepatched-iso-<SW_VERSION>.<PATCH_NUM>.iso
        string(
            name: 'SW_VERSION'
        )
        string(
            name: 'PATCH_NUM'
        )
        string(
            name: 'TIMESTAMP'
        )
        // Enable bash xtrace in the called script
        booleanParam(
            name: 'SHELL_XTRACE'
        )
        booleanParam(
            name: 'DRY_RUN'
        )
        // Remote server / build home that hold the base build whose ISO is
        // used as the starting point
        string(
            name: 'REMOTE_SERVER'
        )
        string(
            name: 'REMOTE_BUILD_HOME'
        )
    }
    stages {
        stage("patch-iso") {
            steps {
                // All real work happens in the shell script
                sh("bash ${Constants.SCRIPTS_DIR}/patch-iso.sh")
            }
        }
    }
    post {
        cleanup {
            // Shared-library step: common part-job cleanup
            cleanupPartJob()
        }
    }
}

View File

@ -0,0 +1,69 @@
// vim: syn=groovy
//
// Copyright (c) 2024 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
// Part job "patch-make": runs "patch-builder" to create the patch, by
// delegating to scripts/patch-make.sh. Triggered by the patch-monolithic
// pipeline via runPart("patch-make").
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
// Parameters are forwarded from the master job (via copyCurrentParams)
parameters {
// Name / build number of the master job that triggered this part
string(
name: 'MASTER_JOB_NAME'
)
string(
name: 'MASTER_BUILD_NUMBER'
)
// Branch (or commit) of the Jenkins scripts repository to use
string(
name: 'JENKINS_SCRIPTS_BRANCH'
)
// Software version and patch number; together they select the default
// patch recipe <SW_VERSION>.<PATCH_NUM>.xml in patch-make.sh
string(
name: 'SW_VERSION'
)
string(
name: 'PATCH_NUM'
)
string(
name: 'TIMESTAMP'
)
// Enable bash xtrace in the called script
booleanParam(
name: 'SHELL_XTRACE'
)
booleanParam(
name: 'DRY_RUN'
)
// Full path to the local build root
string (
name: 'BUILD_HOME'
)
string(
name: 'CUSTOM_PATCH_RECIPE',
description: "Allow you to specify the path to a custom patch recipe to be used when creating the patch.",
defaultValue: ""
)
string(
name: 'PATCH_NAME',
description: "Allow you to specify a custom patch name for the .patch file.",
defaultValue: ""
)
}
stages {
stage("patch-make") {
steps {
// All real work happens in the shell script
sh("bash ${Constants.SCRIPTS_DIR}/patch-make.sh")
}
}
}
post {
cleanup {
// Shared-library step: common part-job cleanup
cleanupPartJob()
}
}
}

View File

@ -0,0 +1,66 @@
// vim: syn=groovy
//
// Copyright (c) 2024 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
// Part job "publish-patch": copies the patch and pre-patched ISO into the
// publish directory, by delegating to scripts/publish-patch.sh. Triggered
// by the patch-monolithic pipeline via runPart("publish-patch").
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
setBuildDescr()
pipeline {
agent any
options {
timestamps()
}
// Parameters are forwarded from the master job (via copyCurrentParams)
parameters {
// Name / build number of the master job that triggered this part
string(
name: 'MASTER_JOB_NAME'
)
string(
name: 'MASTER_BUILD_NUMBER'
)
// Branch (or commit) of the Jenkins scripts repository to use
string(
name: 'JENKINS_SCRIPTS_BRANCH'
)
// Full path to the local build root
string(
name: 'BUILD_HOME'
)
// Software version and patch number used to locate the ISO to publish
string(
name: 'SW_VERSION'
)
string(
name: 'PATCH_NUM'
)
string(
name: 'TIMESTAMP'
)
// Enable bash xtrace in the called script
booleanParam(
name: 'SHELL_XTRACE'
)
booleanParam(
name: 'DRY_RUN'
)
// Git ref (e.g. tag) of the previously created patch; used by
// publish-patch.sh to build the COMMITS report
string(
name: 'PATCH_PREVIOUS'
)
// Git ref to treat as HEAD; also names the publish symlink
string(
name: 'PATCH_TAG'
)
}
stages {
stage ("publish-patch") {
steps {
// All real work happens in the shell script
sh("bash ${Constants.SCRIPTS_DIR}/publish-patch.sh")
}
}
}
post {
cleanup {
// Shared-library step: common part-job cleanup
cleanupPartJob()
}
}
}

View File

@ -0,0 +1,334 @@
// vim: syn=groovy
//
// Copyright (c) 2024 Wind River Systems, Inc.
//
// SPDX-License-Identifier: Apache-2.0
//
library "common@${params.JENKINS_SCRIPTS_BRANCH}"
PROPS = null
IMG_PARAMS = null
IMAGES_FAILED = false
// Parse "KEY=VALUE" lines (as printed by print-config.sh) into a map.
// Blank lines and comment lines (leading '#') are skipped; values may
// themselves contain '=' (only the first one splits). Returns a Map whose
// entries are readable as props.KEY.
def parseProps(text) {
    // BUGFIX: use a real map. The original "def x = {}" creates a Closure;
    // property writes on it delegate to the script binding and leak globals.
    def x = [:]
    for (line in text.split (/\n+/)) {
        // BUGFIX: the original pattern /\s*(?:#.*)?#/ required a trailing '#'
        // and so never matched the blank/comment lines it meant to skip.
        if (line.matches (/\s*(?:#.*)?/)) {
            continue
        }
        // "def" keeps these local instead of leaking into the script binding
        def parts = line.split ("=", 2)
        if (parts.length < 2) {
            continue   // ignore malformed lines that have no '='
        }
        x[parts[0]] = parts[1]
    }
    return x
}
// Populate the global PROPS map with this build's state:
//   NEED_BUILD              -- true when the NEED_BUILD marker file exists
//                              under BUILD_HOME (left by create-changelog)
//   BUILD_OUTPUT_HOME_URL / PUBLISH_URL -- from print-config.sh output
// Returns true when a build is required.
def loadEnv() {
    // BUGFIX: use a real map. The original "def data = {}" creates a Closure;
    // property writes on it delegate to the script binding and leak globals.
    def data = [:]
    data.NEED_BUILD = false
    data.SUPPRESS_DOCKER_IMAGE_BUILD_ERRORS = true
    ws(params.BUILD_HOME) {
        if (fileExists ("NEED_BUILD")) {
            data.NEED_BUILD = true
        }
    }
    final String configText = sh (script: "${Constants.SCRIPTS_DIR}/print-config.sh", returnStdout: true)
    final props = parseProps (configText)
    data.BUILD_OUTPUT_HOME_URL = props.BUILD_OUTPUT_HOME_URL
    data.PUBLISH_URL = props.PUBLISH_URL
    // Consumed by the "Build" stage's when{} and by printBuildFooter()
    PROPS = data
    return data.NEED_BUILD
}
// Resolve the absolute Jenkins path of the sub-job "name" that lives in the
// "parts/" folder next to this job. Fails the build when this job is not
// inside a Jenkins folder.
def partJobName(name) {
    final String jobName = env.JOB_NAME
    final int lastSlash = jobName.lastIndexOf ('/')
    // A job at the Jenkins root has no '/' in its name, hence no folder
    // under which a "parts/" sibling could exist.
    if (lastSlash < 0) {
        error "This job must be in a Jenkins folder!"
    }
    return "/" + jobName.substring (0, lastSlash + 1) + "parts/" + name
}
// Trigger the sub-job "parts/<name>", forwarding all of the current build's
// parameters (copyCurrentParams, from the shared library) plus any extra
// parameter objects supplied in "params". Blocks until the sub-job finishes.
def runPart(name, params = []) {
// Tell Jenkins to checkout the same commit of the sub-job's Jenkinsfile,
// as the current builds' Jenkinsfile's commit.
final gitRef = string (name: 'JENKINS_SCRIPTS_BRANCH', value: env.GIT_COMMIT)
build job: partJobName (name), parameters: copyCurrentParams() + [ gitRef ] + params
}
// Print a human-readable footer at the end of the build log with the
// archive/publish URLs recorded by loadEnv(). Does nothing when PROPS was
// never populated (loadEnv not reached).
def printBuildFooter() {
if (PROPS) {
String msg = ""
msg += "\n"
msg += "========================================\n"
msg += "\n"
if (PROPS.NEED_BUILD) {
msg += "Build output: ${PROPS.BUILD_OUTPUT_HOME_URL}\n"
if (PROPS.PUBLISH_URL) {
msg += "Publish output: ${PROPS.PUBLISH_URL}\n"
}
// IMAGES_FAILED is a script global, initialized false at the top of the
// file -- presumably set true elsewhere when docker image builds fail;
// confirm against the stage that builds images
if (IMAGES_FAILED) {
msg += "\n"
msg += "WARNING:\n"
msg += "WARNING: docker images build attempted, but failed!\n"
msg += "WARNING: see log output above\n"
msg += "WARNING:\n"
}
}
else {
// NOTE(review): this echo emits immediately, so in the log it appears
// BEFORE the "====" footer that msg is still accumulating -- confirm
// that ordering is intended
echo "*** NO CHANGES - BUILD NOT REQUIRED"
}
msg += "\n"
msg += "========================================\n"
msg += "\n"
echo (msg)
}
}
// Top-level "patch-monolithic" pipeline: prepares the build environment,
// optionally (re)builds packages with maximum reuse, pulls the base ostree
// from a remote build, creates the patch and pre-patched ISO, then publishes
// the results. Every stage delegates to a sub-job under "parts/" via
// runPart().
pipeline {
    agent any
    options {
        timestamps()
    }
    parameters {
        string(
            name: 'MASTER_JOB_NAME',
            description: 'Name of the job that has trigger this pipeline.'
        )
        string(
            name: 'MASTER_BUILD_NUMBER',
            description: 'Number of the job that has trigger this pipeline.'
        )
        string(
            name: 'BUILD_HOME',
            description: 'Full path to where the build system is located'
        )
        string(
            name: 'TIMESTAMP',
            description: 'Time when the build is started'
        )
        string(
            name: 'PUBLISH_TIMESTAMP',
            description: 'Time when the build is published'
        )
        booleanParam(
            name: 'REBUILD_BUILDER_IMAGES'
        )
        booleanParam(
            name: 'BUILDER_USE_DOCKER_CACHE'
        )
        booleanParam(
            name: 'BUILD_PACKAGES',
            description: "Enable stage 'Build packages'"
        )
        booleanParam(
            name: 'REFRESH_SOURCE'
        )
        booleanParam(
            name: 'PKG_REUSE'
        )
        booleanParam(
            name: 'BUILD_ISO'
        )
        booleanParam(
            name: 'DRY_RUN'
        )
        booleanParam(
            name: 'SHELL_XTRACE'
        )
        booleanParam(
            name: 'CLEAN_PACKAGES'
        )
        booleanParam(
            name: 'CLEAN_ISO'
        )
        booleanParam(
            name: 'CLEAN_REPOMGR'
        )
        booleanParam(
            name: 'CLEAN_DOWNLOADS'
        )
        booleanParam(
            name: 'CLEAN_DOCKER'
        )
        booleanParam(
            name: 'FORCE_BUILD'
        )
        // BUGFIX: removed the trailing comma after "name:" -- Groovy does
        // not allow trailing commas in argument lists
        booleanParam(
            name: 'USE_DOCKER_CACHE'
        )
        string(
            name: 'JENKINS_SCRIPTS_BRANCH'
        )
        string(
            name: 'REMOTE_SERVER',
            description: 'Address of the remote server who holds the base build system that the patch will be built on top of.' +
                'Used together with REMOTE_BUILD_HOME' +
                'e.g.: http://example-build-server.com:8080'
        )
        string(
            name: 'REMOTE_BUILD_HOME',
            description: 'Full path from the remote server who holds the base build system that the patch will be built on top of.' +
                'Used together with REMOTE_SERVER' +
                'e.g.: /localdisk/loadbuild/starlingx-master/latest_build'
        )
        string(
            name: 'SW_VERSION',
            description: 'Version of the build being used. e.g., XX.YY'
        )
        string(
            name: 'PATCH_NUM',
            description: 'Number of the patch, e.g., 1. To be used together with SW_VERSION like this: XX.YY.PP.'
        )
        string(
            name: 'CUSTOM_PATCH_RECIPE',
            description: "Allow you to specify the path to a custom patch recipe to be used when creating the patch.",
            defaultValue: ""
        )
        string(
            name: 'PATCH_NAME',
            description: "Allow you to specify a custom patch name for the .patch file.",
            defaultValue: ""
        )
        string(
            name: 'PATCH_PREVIOUS',
            description: "Git ref for the previous created patch. It can be a git tag name.",
            defaultValue: ""
        )
        string(
            name: 'PATCH_TAG',
            description: "Git ref to be used as HEAD",
            defaultValue: "HEAD"
        )
        string(
            name: 'STX_SHARED_SOURCE',
            description: "Full HTTPS address of the deb-local-source repository from where we pull the packages to be re-used",
            defaultValue: ""
        )
        string(
            name: 'STX_SHARED_REPO',
            description: "Full HTTPS address of the deb-local-build repository from where we pull the packages to be re-used",
            defaultValue: ""
        )
        text(
            name: 'PATCH_LIST',
            defaultValue: '-',
            description: '''\
<pre><code>List of Gerrit URLs to apply before running the build, one per line "[PATH] URL REF", eg:
https://review.opendev.org/starlingx/config refs/changes/71/859571/4
https://review.opendev.org/starlingx/stx-puppet refs/changes/75/859575/1
https://review.opendev.org/starlingx/tools refs/changes/76/859576/2
or with paths relative to repo root:
cgcs-root/stx/config https://review.opendev.org/starlingx/config refs/changes/71/859571/4
cgcs-root/stx/stx-puppet https://review.opendev.org/starlingx/stx-puppet refs/changes/75/859575/1
stx-tools https://review.opendev.org/starlingx/tools refs/changes/76/859576/2
</code></pre>
'''
        )
    }
    stages {
        stage('Start Environment') {
            steps {
                script {
                    // Initialize BUILD_HOME, create build.conf & stx.conf
                    runPart("init-env")
                    // Update source tree
                    runPart("clone-source")
                    // create BUILD & stx.conf
                    runPart("configure-build")
                    // Stop containers before updating source tree
                    runPart("stop-containers")
                    // Create changelog, LAST_COMMITS, NEED_BUILD etc
                    runPart("create-changelog")
                    // Is build required? loadEnv() also populates PROPS,
                    // which gates the "Build" stage below.
                    if (!loadEnv()) {
                        println "*** NO CHANGES, BUILD NOT REQUIRED ***"
                    }
                }
            }
        }
        stage('Build') {
            // Skip the whole build when create-changelog found no changes
            when { expression { PROPS.NEED_BUILD } }
            stages {
                stage('Prepare:CLEAN') {
                    steps {
                        // Delete or keep packages, aptly state, etc depending on build params
                        runPart("clean-build")
                        // start containers
                        runPart("start-containers")
                        // login to docker early to catch login errors
                        runPart ("docker-login")
                    }
                }
                stage('Prepare:DOWNLOAD') {
                    steps {
                        // populate mirrors
                        runPart("download-prerequisites")
                    }
                }
                stage('PACKAGES') {
                    when {
                        expression {
                            params.BUILD_PACKAGES
                        }
                    }
                    steps {
                        // build and publish packages; PATCH_BUILD=true makes
                        // build-packages use --reuse_maximum instead of --reuse
                        runPart("build-packages",[ booleanParam (name: 'PATCH_BUILD', value: true) ])
                        runPart("publish-packages")
                    }
                }
                stage('PATCH:prepare') {
                    steps {
                        // pull ostree that we will use as base
                        runPart("ostree-pull")
                    }
                }
                stage('PATCH:make') {
                    steps {
                        // create the patch
                        runPart("patch-make")
                        // create the pre-patched iso
                        runPart("patch-iso")
                        // publish patches and pre-patched iso
                        runPart("publish-patch")
                    }
                }
                stage('PATCH:export') {
                    steps {
                        runPart("build-export-dir")
                    }
                }
            }
            post {
                always {
                    echo "build result: ${currentBuild.result}"
                    runPart("stop-containers")
                    // archive anything we may have missed
                    runPart("archive-misc")
                    // save this job's build number on disk (for publish-logs)
                    saveCurrentJenkinsBuildInfo()
                }
                success {
                    // copy LAST_COMMITS to archive root & update the "latest_build" symlink in
                    // both archive and publish roots
                    sh("BUILD_STATUS=success ${Constants.SCRIPTS_DIR}/create-latest-symlinks.sh")
                    // Print archive & publish URLs
                    printBuildFooter()
                    // publish this job's Jenkins log
                    runPart("publish-logs")
                }
                unsuccessful {
                    sh("BUILD_STATUS=fail ${Constants.SCRIPTS_DIR}/create-latest-symlinks.sh")
                    // publish this job's Jenkins log
                    runPart("publish-logs")
                }
            }
        }
    } // stages
}

View File

@ -17,7 +17,8 @@ require_job_env BUILD_ISO
declare_job_env BUILD_PACKAGES_PARALLEL_JOBS
require_job_env PKG_REUSE
declare_job_env STX_SHARED_SOURCE
declare_job_env STX_SHARED_SOURCE
declare_job_env STX_SHARED_REPO
declare_job_env PATCH_BUILD
load_build_env
@ -53,7 +54,19 @@ while [[ $count -lt $BUILD_PACKAGES_ITERATIONS ]] ; do
if $PKG_REUSE && [[ $count -eq 0 ]] ; then
extra_args+=("--clean")
# When building packages with "--reuse" flag, we re-use almost all packages
# except the ones in debian-mirror-tools/config/debian/common/never_reuse.lst
# the packages listed there are built, not re used.
# When building packages with "--reuse_maximum" flag, we re-use every packages
# ignoring the debian-mirror-tools/config/debian/common/never_reuse.lst
# this is useful when building a patch because we don't want to built anything
# that we didn't update in the patch.
if $PATCH_BUILD; then
extra_args+=("--reuse_maximum")
else
extra_args+=("--reuse")
fi
if [[ -n $STX_SHARED_SOURCE ]]; then
environment_args+=("STX_SHARED_SOURCE=$STX_SHARED_SOURCE")

View File

@ -276,15 +276,24 @@ check_all_pods_running() {
fi
}
# Usage: stx_docker_cmd [--dry-run] SHELL_SNIPPET
# Usage: stx_docker_cmd [--dry-run] [--container=NAME] SHELL_SNIPPET
stx_docker_cmd() {
local dry_run=0
local container
if [[ "$1" == "--dry-run" ]] ; then
dry_run=1
shift
fi
if [[ "${1}" = "--container"* ]] ; then
container="${1#*=}"
shift 1
fi
if [[ "$QUIET" != "true" ]] ; then
if [[ -n "$container" ]]; then
echo ">>> running $container pod command:" >&2
else
echo ">>> running builder pod command:" >&2
fi
echo "$1" | sed -r 's/^/\t/' >&2
fi
if [[ "$dry_run" -ne 1 ]] ; then
@ -292,6 +301,9 @@ stx_docker_cmd() {
if __started_by_jenkins ; then
args+=("--no-tty")
fi
if [[ -n "$container" ]]; then
args+=("--container" $container)
fi
stx -d shell "${args[@]}" -c "$1"
fi
}
@ -941,6 +953,19 @@ __print_deb_release() {
__print_deb_release_checksums "SHA512" "sha512sum" || return 1
}
# Usage: ostree_cmd [--dry-run] OSTREE_REPO CMD
# Run an ostree command against /localdisk/deploy/OSTREE_REPO inside the
# "lat" container, via stx_docker_cmd. An optional leading --dry-run flag
# is forwarded to stx_docker_cmd.
ostree_cmd() {
    local dry_run_arg=""
    # Peel off the optional --dry-run flag so $1/$2 line up below
    if [[ "$1" == "--dry-run" ]] ; then
        dry_run_arg="$1"
        shift
    fi
    local repo_name="$1"
    local ostree_args="$2"
    stx_docker_cmd ${dry_run_arg} "--container=lat" "ostree --repo=/localdisk/deploy/${repo_name} ${ostree_args}"
}
if [[ "${SHELL_XTRACE,,}" == "true" || "${SHELL_XTRACE}" == "1" ]] ; then
set -x
export SHELL_XTRACE=true

46
scripts/ostree-pull.sh Normal file
View File

@ -0,0 +1,46 @@
#!/bin/bash
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# Pull the base ostree repository from a remote build home into the local
# deploy directory, so the patch can be created on top of it. All ostree
# operations run in the LAT container via ostree_cmd (from job_utils.sh).
set -e
source $(dirname "$0")/lib/job_utils.sh
require_job_env BUILD_HOME
require_job_env REMOTE_BUILD_HOME
require_job_env REMOTE_SERVER
# NOTE(review): DRY_RUN / DRY_RUN_ARG used below are presumably exported by
# load_build_env -- confirm in lib/job_utils.sh
load_build_env
# run the patch prepare step
# Prepare the folder to receive the ostree repository
DEPLOY_DIR="${BUILD_HOME}/localdisk/deploy"
OSTREE_REPO="ostree_repo"
REMOTE_BUILD_OSTREE=${REMOTE_BUILD_HOME}/localdisk/deploy/${OSTREE_REPO}
# "$_" expands to the last argument of the previous command, i.e. DEPLOY_DIR
mkdir -p "${DEPLOY_DIR}" && cd "$_"
# Init ostree repo
ostree_cmd ${DRY_RUN_ARG} ${OSTREE_REPO} "init --mode=archive-z2"
notice "Pulling content from remote ostree ${REMOTE_SERVER}/${REMOTE_BUILD_HOME}"
# Add build as remote
ostree_cmd ${DRY_RUN_ARG} ${OSTREE_REPO} "remote add --set=gpg-verify=false deb-build \
${REMOTE_SERVER}/${REMOTE_BUILD_OSTREE} starlingx"
# Pull only the latest commit
ostree_cmd ${DRY_RUN_ARG} ${OSTREE_REPO} "pull --depth=0 --mirror deb-build starlingx"
# Update ostree summary
ostree_cmd ${DRY_RUN_ARG} ${OSTREE_REPO} "summary --update"
notice "Pull done, ostree commit log"
ostree_cmd ${DRY_RUN_ARG} ${OSTREE_REPO} "log starlingx"
if ! $DRY_RUN ; then
notice "Fixing ostree_repo permissions"
# NOTE(review): stat-ing REMOTE_BUILD_OSTREE locally assumes the remote
# build home path is also reachable on this host (e.g. NFS) -- confirm
OSTREE_USER=`stat -c %u ${REMOTE_BUILD_OSTREE}`
OSTREE_GROUP=`stat -c %g ${REMOTE_BUILD_OSTREE}`
notice "Changing ostree permission to match source, user: ${OSTREE_USER}, group: ${OSTREE_GROUP}"
# chown from a throwaway container so we don't need root on the host
docker run --rm --volume "${BUILD_HOME}:${BUILD_HOME}" debian:bullseye chown -R "${OSTREE_USER}:${OSTREE_GROUP}" "${DEPLOY_DIR}/${OSTREE_REPO}"
fi

53
scripts/patch-iso.sh Normal file
View File

@ -0,0 +1,53 @@
#!/bin/bash
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# Build the pre-patched ISO: download the base ISO from the remote build,
# apply every .patch found in the patch output directory using
# "create-prepatched-iso" (run in the LAT container), then formally sign
# the resulting ISO.
set -e
source $(dirname "$0")/lib/job_utils.sh
require_job_env BUILD_HOME
require_job_env SW_VERSION
require_job_env PATCH_NUM
load_build_env
# Required by the formal ISO signing step below
require_job_env SIGNING_SERVER
require_job_env SIGNING_USER
require_job_env REMOTE_BUILD_HOME
require_job_env REMOTE_SERVER
require_job_env BUILD_HOME
declare_job_env MY_REPO "$REPO_ROOT/cgcs-root"
# NOTE(review): this strips the first character of PATCH_PREVIOUS --
# presumably a leading "v" of a tag name; confirm. PATCH_PREVIOUS itself is
# not required/declared above.
declare_job_env PATCH_PREVIOUS_NAME "${PATCH_PREVIOUS:1}"
PREPATCHED_NAME="prepatched-iso-${SW_VERSION}.${PATCH_NUM}.iso"
DEFAULT_PATCH_OUTPUT="/localdisk/deploy/patch_output/"
# Collect every patches from the default patch output path
FULL_PATH_PATCH_OUTPUT="${BUILD_HOME}${DEFAULT_PATCH_OUTPUT}"
PATCHES_LIST=$(find $FULL_PATH_PATCH_OUTPUT -type f -name "*.patch" -printf '%f\n' | sort)
# Prepare the patches list to be used in lat container, adding -p prefix and the path
PATCHES_FLAG=$(printf '%s\n' ${PATCHES_LIST[*]} | xargs -I {} echo "-p ${DEFAULT_PATCH_OUTPUT}{}")
# We collect the iso from the remote build home to be used as base
REMOTE_ISO="${REMOTE_SERVER}/${REMOTE_BUILD_HOME}/localdisk/deploy/starlingx-intel-x86-64-cd.iso"
curl -L ${REMOTE_ISO} --output ${BUILD_HOME}/localdisk/starlingx-base.iso
# Path as seen from inside the LAT container -- /localdisk there presumably
# maps to ${BUILD_HOME}/localdisk; confirm the container mounts
ISO_BASE="/localdisk/starlingx-base.iso"
# Create pre-patched ISO
stx_docker_cmd ${DRY_RUN_ARG} "--container=lat" "create-prepatched-iso -i ${ISO_BASE} \
${PATCHES_FLAG[*]} -o /localdisk/deploy/${PREPATCHED_NAME}"
if ! $DRY_RUN ; then
# Subshell keeps the signing environment (MY_WORKSPACE, PATH) local
(
export MY_WORKSPACE=$WORKSPACE_ROOT
export PATH=$MY_REPO/build-tools:$PATH:/usr/local/bin
PREPATCHED_PATH="${BUILD_HOME}/localdisk/deploy/${PREPATCHED_NAME}"
maybe_run sign_iso_formal.sh "${PREPATCHED_PATH}" || die "failed to sign ISO"
)
notice "Formal signing done"
fi

34
scripts/patch-make.sh Normal file
View File

@ -0,0 +1,34 @@
#!/bin/bash
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# Create the patch by running "patch-builder" inside the builder container.
# The recipe defaults to <SW_VERSION>.<PATCH_NUM>.xml under cgcs-root unless
# CUSTOM_PATCH_RECIPE overrides it; PATCH_NAME optionally renames the
# resulting .patch file.
set -e
source $(dirname "$0")/lib/job_utils.sh

require_job_env BUILD_HOME
require_job_env SW_VERSION
require_job_env PATCH_NUM

load_build_env

# RECIPE_PATH is a path *inside* the builder container, so MY_REPO_ROOT_DIR
# is escaped to be expanded there, not here. A non-empty CUSTOM_PATCH_RECIPE
# takes precedence over the default recipe.
RECIPE_PATH="${CUSTOM_PATCH_RECIPE:-\${MY_REPO_ROOT_DIR}/cgcs-root/patch-xml/${SW_VERSION}/${SW_VERSION}.${PATCH_NUM}.xml}"

# Optional custom name for the .patch file
EXTRA_ARGS=()
if [[ -n "${PATCH_NAME}" ]]; then
    EXTRA_ARGS+=("--name ${PATCH_NAME}")
fi

# Build the patch
stx_docker_cmd $DRY_RUN_ARG "patch-builder --recipe ${RECIPE_PATH} ${EXTRA_ARGS[*]}"

73
scripts/publish-patch.sh Normal file
View File

@ -0,0 +1,73 @@
#!/bin/bash
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# Publish the generated patch(es) and pre-patched ISO to the publish
# directory, together with a report of the commits added since
# PATCH_PREVIOUS. This script uses explicit "|| exit 1" error handling
# (no "set -e").
source $(dirname "$0")/lib/job_utils.sh || exit 1
source $(dirname "$0")/lib/publish_utils.sh || exit 1
require_job_env BUILD_HOME
require_job_env SW_VERSION
require_job_env PATCH_NUM
require_job_env PATCH_PREVIOUS
require_job_env PATCH_TAG
load_build_env || exit 1
require_job_env PUBLISH_ROOT
# Publishing has side effects that can't be simulated: refuse dry runs.
if $DRY_RUN ; then
    # BUGFIX: the original message said "DRY_RUN=false is not supported",
    # but this branch fires precisely when DRY_RUN is true.
    bail "DRY_RUN=true is not supported, bailing out"
fi
# Create temporary folder
TEMP_DIR="$BUILD_OUTPUT_HOME/tmp"
mkdir -p "$TEMP_DIR" || exit 1
# Record existing published checksum files so publish_file can account for them
checksum_files_list_file="$TEMP_DIR/published_patch_checksum_files"
find_checksum_files "${PUBLISH_SUBDIR}/outputs/patch" \
    >"$checksum_files_list_file" || exit 1
dst_dir="${PUBLISH_DIR}/outputs/patch"
checksum_file="$dst_dir/$CHECKSUMS_FILENAME"
src_dir="$BUILD_OUTPUT_HOME/localdisk/deploy"
# BUGFIX: was "|| continue", which is invalid outside a loop; fail instead.
# NOTE(review): abs_src_dir appears unused below -- confirm against
# publish_utils.sh before removing.
abs_src_dir="$(readlink -e "$src_dir")" || exit 1
# Clean destination folder
rm -rf --one-file-system "$dst_dir" || exit 1
mkdir -p "$dst_dir" || exit 1
# Go to every repo and get the new commits we have added to make the patch
# since the $PATCH_PREVIOUS
(
    cd "${REPO_ROOT}"
    (
        set -e
        repo forall -c 'echo $REPO_PATH'
    ) | while read gitdir ; do
        cd ${REPO_ROOT}/$gitdir
        # Only report repos where the PATCH_PREVIOUS tag exists; drop the
        # .gitreview housekeeping commits from the log
        commit="$(git tag -l ${PATCH_PREVIOUS} | grep -q ${PATCH_PREVIOUS} && git log --pretty=format:'%ad %an %H %s' --date=short ${PATCH_PREVIOUS}.. | grep -v '.gitreview')"
        if [ "$commit" ]; then printf "%b" "$gitdir \n$commit \n\n"; fi
    done
) > ${dst_dir}/COMMITS_${PATCH_PREVIOUS}_to_${PATCH_TAG}
# Setup the tag symlink
link_target=$(basename "$BUILD_OUTPUT_HOME")
ln -sfn "$link_target" "${PUBLISH_ROOT}/${PATCH_TAG}"
# Publish patch and pre-patched ISO
DEPLOY_DIR=$BUILD_HOME/localdisk/deploy
patch_files=($DEPLOY_DIR/patch_output/*.patch)
iso_file=$DEPLOY_DIR/prepatched-iso-${SW_VERSION}.${PATCH_NUM}.iso
iso_sig=$DEPLOY_DIR/prepatched-iso-${SW_VERSION}.${PATCH_NUM}.sig
# BUGFIX: expand the whole patch array -- the original "$patch_file" only
# published the first .patch even though patch-iso.sh collects all of them.
declare -a file_list=( "${patch_files[@]}" $iso_file $iso_sig )
for f in ${file_list[@]}; do
    publish_file "$f" "$dst_dir" "$checksum_files_list_file" >>"$checksum_file" || exit 1
done
check_pipe_status || exit 1