Retire repository

The Fuel (in the openstack namespace) and fuel-ccp (in the x namespace)
repositories are unused and ready to be retired.

This change removes all content from the repository and adds the usual
README file to point out that the repository is retired, following the
process documented at
https://docs.openstack.org/infra/manual/drivers.html#retiring-a-project

See also
http://lists.openstack.org/pipermail/openstack-discuss/2019-December/011647.html

Depends-On: https://review.opendev.org/699362
Change-Id: I0356a8a279150091921a780eab56c4b12bb64db0
Andreas Jaeger 2019-12-18 09:43:41 +01:00
parent 3c81c46ca5
commit f65ad25c69
52 changed files with 9 additions and 3895 deletions

.gitignore

@@ -1,57 +0,0 @@
*.py[cod]
# C extensions
*.so
# Packages
*.egg
*.egg-info
dist
/build
.eggs
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64
# Installer logs
pip-log.txt
# Unit test / coverage reports
.coverage
.tox
nosetests.xml
.testrepository
.venv
# Translations
*.mo
# Mr Developer
.mr.developer.cfg
.project
.pydevproject
# Complexity
output/*.html
output/*/index.html
# Sphinx
doc/build
# pbr generates these
AUTHORS
ChangeLog
# Editors
*~
.*.swp
.*sw?
# Cached files
.cache/


@@ -1,3 +0,0 @@
# Format is:
# <preferred e-mail> <other e-mail 1>
# <preferred e-mail> <other e-mail 2>


@@ -1,36 +0,0 @@
---
description:
For Fuel team structure and contribution policy, see [1].
This is the repository-level MAINTAINERS file. All contributions to this
repository must be approved by one or more Core Reviewers [2].
If you are contributing to files (or creating new directories) in the
root folder of this repository, please contact the Core Reviewers for
review and merge requests.
If you are contributing to subfolders of this repository, please
check the 'maintainers' section of this file to find the maintainers
of those specific modules.
It is mandatory to get +1 from one or more maintainers before asking
Core Reviewers for review/merge, in order to decrease the load on Core Reviewers [3].
Exceptions are when the maintainers are themselves cores, or when maintainers
are not available for some reason (e.g. on vacation).
[1] https://specs.openstack.org/openstack/fuel-specs/policy/team-structure
[2] https://review.openstack.org/#/admin/groups/1016,members
[3] http://lists.openstack.org/pipermail/openstack-dev/2015-August/072406.html
Please keep this file in YAML format so that helper scripts
can read it as configuration data.
maintainers:
- perestroika/: &build_team
- name: Dmitry Burmistrov
email: dburmistrov@mirantis.com
IRC: dburmistrov
- name: Sergey Kulanov
email: skulanov@mirantis.com
IRC: SergK


@@ -1,7 +1,10 @@
-====================
-Repository structure
-====================
+This project is no longer maintained.
-* perestroika
-It is a set shell/python script that are used to build DEB/RPM
-packages. These scripts are widely used by Fuel Packaging CI.
+The contents of this repository are still available in the Git
+source code management system. To see the contents of this
+repository before it reached its end of life, please check out the
+previous commit with "git checkout HEAD^1".
+For any further questions, please email
+openstack-discuss@lists.openstack.org or join #openstack-dev on
+Freenode.
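A minimal sketch of retrieving the retired content locally (the clone URL is a
placeholder for this repository's actual location):

    git clone https://opendev.org/<namespace>/<repository>
    cd <repository>
    git checkout HEAD^1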


@@ -1,2 +0,0 @@
.package-defaults
.publisher-defaults


@@ -1,202 +0,0 @@
#!/bin/bash
set -o errexit
BIN_DIR=$(dirname `readlink -e $0`)
source ${BIN_DIR}/functions/build-functions || exit 1
source ${BIN_DIR}/functions/docker-functions || exit 1
main () {
local repos=()
local pins=()
local pinpkgs=()
local pinprios=()
local KEEP_CHROOT=true
# Parse parameters
while test -n "$1" ; do
case "$1" in
--config-dir|-c)
local CONFIG_DIR=$2
shift 2
;;
--no-keep-chroot| -n)
shift
unset KEEP_CHROOT
;;
--verbose|-v)
shift
VERBOSE=true
local MOCK_OPTS="${MOCK_OPTS} -v"
local BASH_OPTS="${BASH_OPTS} -x"
set -o xtrace
;;
--build|-b)
shift
local ACTION_BUILD=true
;;
--update|-u)
shift
local ACTION_UPDATE=true
;;
--shell|-s)
shift
local ACTION_SHELL=true
;;
--init|-i)
shift
local ACTION_INIT=true
;;
--no-init)
shift
local NO_INIT=true
;;
--repository|--repo|-r)
local repos[${#repos[@]}]="$2";
shift 2;
;;
--pin)
local pins[${#pins[@]}]="$2";
shift 2;
;;
--pin-package)
local pinpkgs[${#pinpkgs[@]}]="$2";
shift 2;
;;
--pin-priority)
local pinprios[${#pinprios[@]}]="$2";
shift 2;
;;
--dist|-d)
local DIST="$2";
shift 2;
;;
--source|--src)
local SOURCE_PATH="$2";
shift 2;
;;
--output|-o)
local DEST_PATH="$2";
shift 2;
;;
*)
fail_exit "ERROR: Unknown parameter \`$1\`"
;;
esac
done
[ -z "${DIST}" ] && fail_exit "ERROR: Distribution is not specified"
[ ${#pins[@]} -ne ${#pinpkgs[@]} \
-o ${#pins[@]} -ne ${#pinprios[@]} ] \
&& fail_exit "ERROR: Inconsistent Apt pinning options"
local CONFIG_DIR=${CONFIG_DIR:-${BIN_DIR}/conf}
local SOURCE_PATH=${SOURCE_PATH:-`pwd`}
local DEST_PATH=${DEST_PATH:-${SOURCE_PATH}/buildresult}
[ -e "${CONFIG_DIR}/common.conf" ] \
&& source ${CONFIG_DIR}/common.conf
source ${CONFIG_DIR}/${DIST}.conf 2>/dev/null \
|| fail_exit "ERROR: Unsupported distribution ${DIST}"
[ -z "${ROOT_NAME}" ] && fail_exit "CONFIG ERROR: Chroot name is not specified"
[ -z "${TYPE}" ] && fail_exit "CONFIG ERROR: Chroot type is not specified"
# Check docker image
local LOCK_FILE=/tmp/${CONTAINER_NAME%%:*}.lock
job_lock ${LOCK_FILE} wait 3600
[ $(docker images | egrep -c "^${CONTAINER_NAME%%:*}") -eq 0 ] \
&& local BUILD_DOCKER_IMAGE=true
[ "${BUILD_DOCKER_IMAGE}" = "true" -a "${NO_INIT}" = "true" ] \
&& fail_exit "ERROR: Can't find docker image for ${CONTAINER_NAME%%:*}"
# Build docker image
[ ! -f "${CONFIG_DIR}/${CONTAINER_NAME%%:*}/Dockerfile" ] \
&& fail_exit "CONFIG ERROR: Unsupported container ${CONTAINER_NAME%%:*}"
if [ "${BUILD_DOCKER_IMAGE}" == "true" ] ; then
docker build -t ${CONTAINER_NAME%%:*} \
${CONFIG_DIR}/${CONTAINER_NAME%%:*}/
fi
job_lock ${LOCK_FILE} unset
# Check chroot
local LOCK_FILE=/tmp/${ROOT_NAME}.lock
job_lock ${LOCK_FILE} wait 3600
[ ! -d "${CACHE_DIR}/${ROOT_NAME}" ] && local ACTION_INIT=true
[ "${ACTION_INIT}" == "true" ] && unset ACTION_UPDATE
[ "${ACTION_INIT}" = "true" -a "${NO_INIT}" = "true" ] \
&& fail_exit "ERROR: Can't find chroot for ${DIST}"
# Init chroot
if [ "${ACTION_INIT}" == "true" ] ; then
case $TYPE in
mock)
docker_init_mock
;;
sbuild)
docker_init_sbuild
;;
*)
fail_exit "CONFIG ERROR: Unsupported distribution type"
;;
esac
fi
# Update chroot
[ "${NO_INIT}" = "true" ] && unset ACTION_UPDATE
if [ "${ACTION_UPDATE}" == "true" ] ; then
case ${TYPE} in
mock)
docker_update_mock
;;
sbuild)
docker_update_sbuild
;;
*)
fail_exit "CONFIG ERROR: Unsupported distribution type"
;;
esac
fi
job_lock ${LOCK_FILE} unset
local UNAME=$(id -u)
local GNAME=$(id -g)
# Build package
if [ "${ACTION_BUILD}" == "true" ] ; then
case ${TYPE} in
mock)
docker_build_mock
;;
sbuild)
docker_build_sbuild
;;
*)
fail_exit "CONFIG ERROR: Unsupported distribution type"
;;
esac
fi
# Get into buildroot
if [ "${ACTION_SHELL}" == "true" ] ; then
case ${TYPE} in
mock)
[ ! -d "${ROOT_DIR}/${ROOT_NAME}/root/etc" ] \
&& fail_exit "ERROR: There is no buildroot for ${DIST}"
docker_shell_mock
;;
sbuild)
[ ! -d "${ROOT_DIR}/${ROOT_NAME}/build" ] \
&& fail_exit "ERROR: There is no buildroot for ${DIST}"
docker_shell_sbuild
;;
*)
fail_exit "CONFIG ERROR: Unsupported distribution type"
;;
esac
fi
}
main "$@"
cleanup_and_exit 0


@@ -1,263 +0,0 @@
#!/bin/bash
set -o xtrace
set -o errexit
[ -f ".packages-defaults" ] && source .packages-defaults
BINDIR=$(dirname `readlink -e $0`)
source "${BINDIR}"/build-functions.sh
main () {
set_default_params
# Get package tree from gerrit
fetch_upstream
local _srcpath="${MYOUTDIR}/${PACKAGENAME}-src"
local _specpath=$_srcpath
local _testspath=$_srcpath
[ "$IS_OPENSTACK" == "true" ] && _specpath="${MYOUTDIR}/${PACKAGENAME}-spec${SPEC_PREFIX_PATH}" && _testspath="${MYOUTDIR}/${PACKAGENAME}-spec"
local _debianpath=$_specpath
if [ -d "${_debianpath}/debian" ] ; then
# Unpacked sources and specs
local srcpackagename=`head -1 ${_debianpath}/debian/changelog | cut -d' ' -f1`
local version_string=$(dpkg-parsechangelog --show-field Version -l${_debianpath}/debian/changelog)
local version=`echo "$version_string" | sed -e 's|\(.*\)-.*|\1|g'`
local epochnumber=`echo "$version_string" | egrep -o "^[0-9]+:"`
local binpackagenames="`cat ${_debianpath}/debian/control | grep ^Package | cut -d' ' -f 2 | tr '\n' ' '`"
local distro=`head -1 ${_debianpath}/debian/changelog | awk -F'[ ;]' '{print $3}'`
local pkg_version="${version#*:}"
# Get last commit info
# $message $author $email $cdate $commitsha $lastgitlog
if [ "$IS_OPENSTACK" == "true" ] ; then
local gitspecsha=$(git -C ${_specpath} log -n 1 --pretty=format:%H)
local gitspecprj=$(git -C ${_specpath} remote -v | head -n 1 | awk '{print $2}' | awk -F '/' '{print $NF}' | sed 's|.git$||' )
fi
get_last_commit_info ${_srcpath}
local gitsrcsha=$(git -C ${_srcpath} log -n 1 --pretty=format:%H)
local gitsrcprj=$(git -C ${_srcpath} remote -v | head -n 1 | awk '{print $2}' | awk -F '/' '{print $NF}' | sed 's|.git$||' )
TAR_NAME="${srcpackagename}_${pkg_version}.orig.tar.gz"
if [ "$IS_OPENSTACK" == "true" ] ; then
# Get version number from the latest git tag for openstack packages
local release_tag=$(git -C $_srcpath describe --abbrev=0 --candidates=1 --match "*[0-9]*" | sed -r 's|^[^0-9]+||')
# Deal with PyPi versions like 2015.1.0rc1
# It breaks version comparison
# Change it to 2015.1.0~rc1
local script_dir=$(dirname $(readlink -e $0))
local convert_version_py="$script_dir/convert_version.py"
if grep -qE "^${SRC_PROJECT}\$" "$script_dir/fuel-projects-list"
then
local version_length=2
fi
version=$(python ${convert_version_py} --tag ${release_tag} \
${version_length:+ -l $version_length})
if [ "${version}" != "${pkg_version}" ] ; then
echo -e "ERROR: Version mismatch. Latest version from Gerrit tag: $version, and from changelog: $pkg_version. Build aborted."
exit 1
fi
# Get revision number as commit count from tag to head of source branch
if [ "$IS_PLUGIN" = "true" ]
then
local _rev=$(git -C $_srcpath rev-list ${release_tag}..origin/${SOURCE_BRANCH} | wc -l)
else
local _rev=$(git -C $_srcpath rev-list --no-merges ${release_tag}..origin/${SOURCE_BRANCH} | wc -l)
fi
[ "$GERRIT_CHANGE_STATUS" == "NEW" ] \
&& [ ${GERRIT_PROJECT} == "${SRC_PROJECT}" ] \
&& _rev=$(( $_rev + 1 ))
[ "$IS_HOTFIX" == "true" ] \
&& _rev=$(get_extra_revision hotfix ${_srcpath} ${release_tag})
[ "$IS_SECURITY" == "true" ] \
&& _rev=$(get_extra_revision security ${_srcpath} ${release_tag})
if [ "$IS_PLUGIN" = "true" ]
then
version=${version}.dev${_rev}
local release=$(dpkg-parsechangelog --show-field Version -l${_debianpath}/debian/changelog | awk -F'-' '{print $NF}')
local distribution_string=$(dpkg-parsechangelog --show-field Distribution -l${_debianpath}/debian/changelog)
else
local release=$(dpkg-parsechangelog --show-field Version -l${_debianpath}/debian/changelog | awk -F'-' '{print $NF}' | sed -r 's|[0-9]+$||')
local release="${release}${_rev}"
fi
local fullver=${epochnumber}${version}-${release}
local TAR_NAME="${srcpackagename}_${version}.orig.tar.gz"
# Update version and changelog
local firstline=1
local _dchopts="-c ${_debianpath}/debian/changelog"
echo "$lastgitlog" | while read LINE; do
[ $firstline == 1 ] && local cmd="dch $_dchopts -D ${distribution_string:-$distro} -b --force-distribution -v $fullver" || local cmd="dch $_dchopts -a"
firstline=0
local commitid=`echo "$LINE" | cut -d'|' -f1`
local email=`echo "$LINE" | cut -d'|' -f2`
local author=`echo "$LINE" | cut -d'|' -f3`
local subject=`echo "$LINE" | cut -d'|' -f4`
DEBFULLNAME="$author" DEBEMAIL="$email" $cmd "$commitid $subject"
done
# Prepare source tarball
pushd $_srcpath &>/dev/null
local ignore_list="rally horizon-vendor-theme fuel-astute fuel-library fuel-main fuel-nailgun-agent fuel-ui fuel-web fuel-agent"
if [ "$IS_PLUGIN" = "true" ]
then
git -C ${_srcpath} archive --format tar.gz \
--prefix "${srcpackagename}-${version}/" \
--worktree-attributes -o ${BUILDDIR}/${TAR_NAME} HEAD
elif [ $(echo $ignore_list | grep -Eo "(^| )$PACKAGENAME( |$)") ]; then
# Do not perform `setup.py sdist` for rally packages
tar -czf ${BUILDDIR}/$TAR_NAME $EXCLUDES .
else
# Use virtualenv to deal with different pbr requirements
local venv=$(mktemp -d)
virtualenv "$venv"
source "${venv}/bin/activate"
pip install --upgrade setuptools
python setup.py --version # this will download pbr if it's not available
PBR_VERSION=$release_tag python setup.py sdist -d ${BUILDDIR}/
deactivate
[ -d "$venv" ] && rm -rf "$venv"
# Fix source folder name at sdist tarball
local sdist_tarball=$(find ${BUILDDIR}/ -maxdepth 1 -name "*.gz")
if [ "$(tar -tf $sdist_tarball | head -n 1 | cut -d'/' -f1)" != "${srcpackagename}-${version}" ] ; then
# rename source folder
local tempdir=$(mktemp -d)
tar -C $tempdir -xf $sdist_tarball
mv $tempdir/* $tempdir/${srcpackagename}-${version}
tar -C $tempdir -czf ${BUILDDIR}/$TAR_NAME ${srcpackagename}-${version}
rm -f $sdist_tarball
[ -d "$tempdir" ] && rm -rf $tempdir
else
mv $sdist_tarball ${BUILDDIR}/$TAR_NAME || :
fi
fi
popd &>/dev/null
else
local fullver=$version_string
# Update changelog
DEBFULLNAME=$author DEBEMAIL=$email dch -c ${_debianpath}/debian/changelog -a "$commitsha $message"
# Prepare source tarball
# Exclude debian and tests dir
cat > ${_srcpath}/.gitattributes <<-EOF
/debian export-ignore
/tests export-ignore
/.gitignore export-ignore
/.gitreview export-ignore
EOF
git -C ${_srcpath} archive --format tar.gz --worktree-attributes -o ${BUILDDIR}/${TAR_NAME} HEAD
fi
mkdir -p ${BUILDDIR}/$srcpackagename
cp -R ${_debianpath}/debian ${BUILDDIR}/${srcpackagename}/
else
# Packed sources (.dsc + .gz )
cp ${_srcpath}/* $BUILDDIR
fi
# Prepare tests folder to provide as parameter
rm -f ${WRKDIR}/tests.envfile
[ -d "${_testspath}/tests" ] && echo "TESTS_CONTENT='`tar -cz -C ${_testspath} tests | base64 -w0`'" > ${WRKDIR}/tests.envfile
# Build stage
local REQUEST=$REQUEST_NUM
[ -n "$LP_BUG" ] && REQUEST=$LP_BUG
COMPONENTS="main restricted"
DEB_HOTFIX_DIST_NAME=${DEB_HOTFIX_DIST_NAME:-hotfix}
[ -n "${EXTRAREPO}" ] && EXTRAREPO="${EXTRAREPO}|"
EXTRAREPO="${EXTRAREPO}http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_DIST_NAME} ${COMPONENTS}"
case true in
"$IS_HOTFIX" )
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_HOTFIX_DIST_NAME} ${COMPONENTS}"
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_UPDATES_DIST_NAME} ${COMPONENTS}"
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_SECURITY_DIST_NAME} ${COMPONENTS}"
;;
"$IS_SECURITY" )
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_UPDATES_DIST_NAME} ${COMPONENTS}"
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_SECURITY_DIST_NAME} ${COMPONENTS}"
;;
"$IS_UPDATES" )
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_PROPOSED_DIST_NAME} ${COMPONENTS}"
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_UPDATES_DIST_NAME} ${COMPONENTS}"
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_SECURITY_DIST_NAME} ${COMPONENTS}"
;;
esac
if [ "$GERRIT_CHANGE_STATUS" == "NEW" ] && [ -n "$LP_BUG" -o -n "$CUSTOM_REPO_ID" ] ; then
local DEB_REQUEST_REPO_PATH=${DEB_REQUEST_REPO_PATH:-$DEB_REPO_PATH}
local REMOTE_REQUEST_REPO_HOST=${REMOTE_REQUEST_REPO_HOST:-$REMOTE_REPO_HOST}
case true in
"$IS_HOTFIX" )
local DEB_REQUEST_DIST_NAME=$DEB_HOTFIX_DIST_NAME
;;
"$IS_SECURITY" )
local DEB_REQUEST_DIST_NAME=$DEB_SECURITY_DIST_NAME
;;
"$IS_UPDATES" )
local DEB_REQUEST_DIST_NAME=$DEB_PROPOSED_DIST_NAME
;;
*)
local DEB_REQUEST_DIST_NAME=$DEB_DIST_NAME
;;
esac
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REQUEST_REPO_HOST}/${REPO_REQUEST_PATH_PREFIX}/${REQUEST}/${DEB_REQUEST_REPO_PATH} ${DEB_REQUEST_DIST_NAME} ${COMPONENTS}"
fi
export EXTRAREPO
if [ -n "$EXTRAREPO" ] ; then
local EXTRAPARAMS=""
local OLDIFS="$IFS"
IFS='|'
for repo in $EXTRAREPO; do
IFS="$OLDIFS"
[ -n "$repo" ] && EXTRAPARAMS="${EXTRAPARAMS} --repository \"$repo\""
IFS='|'
done
IFS="$OLDIFS"
fi
local tmpdir=$(mktemp -d ${PKG_DIR}/build-XXXXXXXX)
echo "BUILD_SUCCEEDED=false" > ${WRKDIR}/buildresult.params
bash -c "${WRKDIR}/build \
--verbose \
--no-keep-chroot \
--dist ${DIST} \
--build \
--source $BUILDDIR \
--output $tmpdir \
${EXTRAPARAMS}"
local exitstatus=$(cat ${tmpdir}/exitstatus || echo 1)
[ -f "${tmpdir}/buildlog.sbuild" ] && mv "${tmpdir}/buildlog.sbuild" "${WRKDIR}/buildlog.txt"
fill_buildresult $exitstatus 0 $PACKAGENAME DEB
if [ "$exitstatus" == "0" ] ; then
rm -f ${WRKDIR}/buildresult.params
cat >${WRKDIR}/buildresult.params<<-EOL
BUILD_HOST=`hostname -f`
PKG_PATH=$tmpdir
GERRIT_CHANGE_STATUS=$GERRIT_CHANGE_STATUS
REQUEST_NUM=$REQUEST_NUM
LP_BUG=$LP_BUG
IS_SECURITY=$IS_SECURITY
IS_HOTFIX=$IS_HOTFIX
EXTRAREPO="$EXTRAREPO"
REPO_TYPE=deb
DIST=$DIST
EOL
# Fill yaml file
yaml_report_file=${tmpdir}/${srcpackagename}.yaml
echo "Source: ${srcpackagename}" > $yaml_report_file
echo "Version: ${fullver}" >> $yaml_report_file
echo "Binary:" >> $yaml_report_file
for binary in $(find ${tmpdir}/ -name *deb) ; do
_binary=${binary##*/}
echo " - ${_binary%%_*}" >> $yaml_report_file
done
echo "Build_time: $(date '+%F-%H-%M-%S')" >> $yaml_report_file
echo "Code_project:" >> $yaml_report_file
echo " ${gitsrcprj}: ${gitsrcsha}" >> $yaml_report_file
[ "$IS_OPENSTACK" == "true" ] \
&& echo " ${gitspecprj}: ${gitspecsha}" >> $yaml_report_file
fi
exit $exitstatus
}
main "$@"
exit 0


@@ -1,199 +0,0 @@
#!/bin/bash
set -o xtrace
set -o errexit
[ -f .fuel-default ] && source .fuel-default
BINDIR=$(dirname `readlink -e $0`)
source "${BINDIR}"/build-functions.sh
main () {
set_default_params
[ -n "$GERRIT_BRANCH" ] && SOURCE_BRANCH=$GERRIT_BRANCH && SOURCE_REFSPEC=$GERRIT_REFSPEC
[ -n "$GERRIT_PROJECT" ] && SRC_PROJECT=$GERRIT_PROJECT
PACKAGENAME=${SRC_PROJECT##*/}
local DEBSPECFILES="${PACKAGENAME}-src/debian"
# If we are triggered from a gerrit environment, keep the current workflow
# and fetch the code from upstream;
# otherwise use the custom path to already prepared source code
# defined in the $CUSTOM_SRC_PATH variable
if [ -n "${GERRIT_BRANCH}" ]; then
# Get package tree from gerrit
fetch_upstream
local _srcpath="${MYOUTDIR}/${PACKAGENAME}-src"
else
local _srcpath="${CUSTOM_SRC_PATH}"
fi
local _specpath=$_srcpath
local _debianpath=$_specpath
if [ -d "${_debianpath}/debian" ] ; then
# Unpacked sources and specs
local srcpackagename=`head -1 ${_debianpath}/debian/changelog | cut -d' ' -f1`
local version_string=$(dpkg-parsechangelog --show-field Version -l${_debianpath}/debian/changelog)
local version=`echo "$version_string" | sed -e 's|\(.*\)-.*|\1|g'`
local binpackagenames="`cat ${_debianpath}/debian/control | grep ^Package | cut -d' ' -f 2 | tr '\n' ' '`"
local distro=`head -1 ${_debianpath}/debian/changelog | awk -F'[ ;]' '{print $3}'`
# Get last commit info
# $message $author $email $cdate $commitsha $lastgitlog
get_last_commit_info ${_srcpath}
local gitsrcsha=$(git -C ${_srcpath} log -n 1 --pretty=format:%H)
local gitsrcprj=$(git -C ${_srcpath} remote -v | head -n 1 | awk '{print $2}' | awk -F '/' '{print $NF}' | sed 's|.git$||' )
# Get revision number as commit count for src+spec projects
local _rev=`git -C $_srcpath rev-list --no-merges origin/${SOURCE_BRANCH} | wc -l`
[ "$GERRIT_CHANGE_STATUS" == "NEW" ] && _rev=$(( $_rev + 1 ))
[ "$IS_HOTFIX" == "true" ] \
&& _rev=$(get_extra_revision hotfix ${_srcpath})
# if gitshasrc is not defined (we are not using fetch_upstream), let's do it
[ -n "${gitshasrc}" ] || local gitshasrc=$(git -C $_srcpath log -1 --pretty="%h")
if [ "$GERRIT_CHANGE_STATUS" == "NEW" ] ; then
local OVERRIDE_PKG_REVISION=${OVERRIDE_PKG_REVISION:-1}
local release="${OVERRIDE_PKG_REVISION}~u14.04+mos${_rev}+git.${gitshasrc}"
else
local release="1~u14.04+mos${_rev}"
fi
local fullver=${version}-${release}
# Update version and changelog
local firstline=1
local _dchopts="-c ${_debianpath}/debian/changelog"
echo "$lastgitlog" | while read LINE; do
[ $firstline == 1 ] && local cmd="dch $_dchopts -D $distro -b --force-distribution -v $fullver" || local cmd="dch $_dchopts -a"
firstline=0
local commitid=`echo "$LINE" | cut -d'|' -f1`
local email=`echo "$LINE" | cut -d'|' -f2`
local author=`echo "$LINE" | cut -d'|' -f3`
local subject=`echo "$LINE" | cut -d'|' -f4`
DEBFULLNAME="$author" DEBEMAIL="$email" $cmd "$commitid $subject"
done
TAR_NAME="${srcpackagename}_${version#*:}.orig.tar.gz"
# Update changelog
DEBFULLNAME=$author DEBEMAIL=$email dch -c ${_debianpath}/debian/changelog -a "$commitsha $message"
# Prepare source tarball
# Exclude debian dir
pushd $_srcpath &>/dev/null
cat >.gitattributes<<-EOF
/debian export-ignore
/.gitignore export-ignore
/.gitreview export-ignore
EOF
git archive --prefix=./ --format=tar.gz --worktree-attributes HEAD --output="${BUILDDIR}/${TAR_NAME}"
popd &>/dev/null
mkdir -p ${BUILDDIR}/$srcpackagename
cp -R ${_debianpath}/debian ${BUILDDIR}/${srcpackagename}/
fi
# Build stage
local REQUEST=$REQUEST_NUM
[ -n "$LP_BUG" ] && REQUEST=$LP_BUG
COMPONENTS="main restricted"
DEB_HOTFIX_DIST_NAME=${DEB_HOTFIX_DIST_NAME:-hotfix}
[ -n "${EXTRAREPO}" ] && EXTRAREPO="${EXTRAREPO}|"
EXTRAREPO="${EXTRAREPO}http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_DIST_NAME} ${COMPONENTS}"
case true in
"$IS_HOTFIX" )
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_HOTFIX_DIST_NAME} ${COMPONENTS}"
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_UPDATES_DIST_NAME} ${COMPONENTS}"
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_SECURITY_DIST_NAME} ${COMPONENTS}"
;;
"$IS_SECURITY" )
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_UPDATES_DIST_NAME} ${COMPONENTS}"
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_SECURITY_DIST_NAME} ${COMPONENTS}"
;;
"$IS_UPDATES" )
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_PROPOSED_DIST_NAME} ${COMPONENTS}"
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_UPDATES_DIST_NAME} ${COMPONENTS}"
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REPO_HOST}/${DEB_REPO_PATH} ${DEB_SECURITY_DIST_NAME} ${COMPONENTS}"
;;
esac
if [ "$GERRIT_CHANGE_STATUS" == "NEW" ] && [ -n "$LP_BUG" -o -n "$CUSTOM_REPO_ID" ] ; then
local DEB_REQUEST_REPO_PATH=${DEB_REQUEST_REPO_PATH:-$DEB_REPO_PATH}
local REMOTE_REQUEST_REPO_HOST=${REMOTE_REQUEST_REPO_HOST:-$REMOTE_REPO_HOST}
case true in
"$IS_HOTFIX" )
local DEB_REQUEST_DIST_NAME=$DEB_HOTFIX_DIST_NAME
;;
"$IS_SECURITY" )
local DEB_REQUEST_DIST_NAME=$DEB_SECURITY_DIST_NAME
;;
"$IS_UPDATES" )
local DEB_REQUEST_DIST_NAME=$DEB_PROPOSED_DIST_NAME
;;
*)
local DEB_REQUEST_DIST_NAME=$DEB_DIST_NAME
;;
esac
EXTRAREPO="${EXTRAREPO}|http://${REMOTE_REQUEST_REPO_HOST}/${REPO_REQUEST_PATH_PREFIX}/${REQUEST}/${DEB_REQUEST_REPO_PATH} ${DEB_REQUEST_DIST_NAME} ${COMPONENTS}"
fi
export EXTRAREPO
if [ -n "$EXTRAREPO" ] ; then
local EXTRAPARAMS=""
local OLDIFS="$IFS"
IFS='|'
for repo in $EXTRAREPO; do
IFS="$OLDIFS"
[ -n "$repo" ] && EXTRAPARAMS="${EXTRAPARAMS} --repository \"$repo\""
IFS='|'
done
IFS="$OLDIFS"
fi
local tmpdir=$(mktemp -d ${PKG_DIR}/build-XXXXXXXX)
echo "BUILD_SUCCEEDED=false" > ${WRKDIR}/buildresult.params
bash -c "${WRKDIR}/build \
--verbose \
--no-keep-chroot \
--dist ${DIST} \
--build \
--source $BUILDDIR \
--output $tmpdir \
${EXTRAPARAMS}"
local exitstatus=$(cat ${tmpdir}/exitstatus || echo 1)
[ -f "${tmpdir}/buildlog.sbuild" ] && mv "${tmpdir}/buildlog.sbuild" "${WRKDIR}/buildlog.txt"
fill_buildresult $exitstatus 0 $PACKAGENAME DEB
if [ "$exitstatus" == "0" ] && [ -n "${GERRIT_BRANCH}" ]; then
rm -f ${WRKDIR}/buildresult.params
cat >${WRKDIR}/buildresult.params<<-EOL
BUILD_HOST=`hostname -f`
PKG_PATH=$tmpdir
GERRIT_CHANGE_STATUS=$GERRIT_CHANGE_STATUS
REQUEST_NUM=$REQUEST_NUM
LP_BUG=$LP_BUG
IS_HOTFIX=$IS_HOTFIX
IS_SECURITY=$IS_SECURITY
EXTRAREPO="$EXTRAREPO"
REPO_TYPE=deb
DIST=$DIST
EOL
# Fill yaml file
yaml_report_file=${tmpdir}/${srcpackagename}.yaml
echo "Source: ${srcpackagename}" > $yaml_report_file
echo "Version: ${fullver}" >> $yaml_report_file
echo "Binary:" >> $yaml_report_file
for binary in $(find ${tmpdir}/ -name *deb) ; do
_binary=${binary##*/}
echo " - ${_binary%%_*}" >> $yaml_report_file
done
echo "Build_time: $(date '+%F-%H-%M-%S')" >> $yaml_report_file
echo "Code_project:" >> $yaml_report_file
echo " ${gitsrcprj}: ${gitsrcsha}" >> $yaml_report_file
fi
echo "Packages: $PACKAGENAME"
exit $exitstatus
}
main "$@"
exit 0


@@ -1,206 +0,0 @@
#!/bin/bash
set -o xtrace
set -o errexit
[ -f .fuel-default ] && source .fuel-default
BINDIR=$(dirname `readlink -e $0`)
source "${BINDIR}"/build-functions.sh
main () {
set_default_params
[ -n "$GERRIT_BRANCH" ] && SOURCE_BRANCH=$GERRIT_BRANCH && SOURCE_REFSPEC=$GERRIT_REFSPEC
[ -n "$GERRIT_PROJECT" ] && SRC_PROJECT=$GERRIT_PROJECT
PACKAGENAME=${SRC_PROJECT##*/}
# If we are triggered from a gerrit environment, keep the current workflow
# and fetch the code from upstream;
# otherwise use the custom path to already prepared source code
# defined in the $CUSTOM_SRC_PATH variable
if [ -n "${GERRIT_BRANCH}" ]; then
# Get package tree from gerrit
fetch_upstream
local _srcpath="${MYOUTDIR}/${PACKAGENAME}-src"
else
local _srcpath="${CUSTOM_SRC_PATH}"
fi
local _specpath="${_srcpath}/specs"
# Get last commit info
# $message $author $email $cdate $commitsha $lastgitlog
get_last_commit_info ${_srcpath}
local gitsrcsha=$(git -C ${_srcpath} log -n 1 --pretty=format:%H)
local gitsrcprj=$(git -C ${_srcpath} remote -v | head -n 1 | awk '{print $2}' | awk -F '/' '{print $NF}' | sed 's|.git$||' )
# Update specs
local specfile=`find $_specpath -name *.spec`
local version=`rpm -q --specfile $specfile --queryformat '%{VERSION}\n' | head -1`
local release=`rpm -q --specfile $specfile --queryformat '%{RELEASE}\n' | head -1`
## Add changelog section if it doesn't exist
[ `cat ${specfile} | grep -c '^%changelog'` -eq 0 ] && echo "%changelog" >> ${specfile}
local _rev=`git -C $_srcpath rev-list --no-merges origin/${SOURCE_BRANCH} | wc -l`
# if gitshasrc is not defined (we are not using fetch_upstream), let's do it
[ -n "${gitshasrc}" ] || local gitshasrc=$(git -C $_srcpath log -1 --pretty="%h")
[ "$GERRIT_CHANGE_STATUS" == "NEW" ] && _rev=$(( $_rev + 1 ))
[ "$IS_HOTFIX" == "true" ] \
&& _rev=$(get_extra_revision hotfix ${_srcpath})
if [ "$GERRIT_CHANGE_STATUS" == "NEW" ] ; then
local OVERRIDE_PKG_REVISION=${OVERRIDE_PKG_REVISION:-1}
local release="${OVERRIDE_PKG_REVISION}.mos${_rev}.git.${gitshasrc}"
else
local release="1.mos${_rev}"
fi
local TAR_NAME=${PACKAGENAME}-${version}.tar.gz
# Update version and changelog
sed -i "s|Version:.*$|Version: ${version}|" $specfile
sed -i "s|Release:.*$|Release: ${release}|" $specfile
sed -i "s|Source0:.*$|Source0: ${TAR_NAME}|" $specfile
## Update changelog
local firstline=1
if [ ! -z "$lastgitlog" ]; then
sed -i "/%changelog/i%newchangelog" ${specfile}
echo "$lastgitlog" | while read LINE; do
local commitid=`echo "$LINE" | cut -d'|' -f1`
local email=`echo "$LINE" | cut -d'|' -f2`
local author=`echo "$LINE" | cut -d'|' -f3`
# Get current date to avoid wrong chronological order in %changelog section
local date=`LC_TIME=C date +"%a %b %d %Y"`
local subject=`echo "$LINE" | cut -d'|' -f4`
[ $firstline == 1 ] && sed -i "/%changelog/i\* $date $author \<${email}\> \- ${version}-${release}" ${specfile}
sed -i "/%changelog/i\- $commitid $subject" ${specfile}
firstline=0
done
fi
sed -i '/%changelog/i\\' ${specfile}
sed -i '/^%changelog/d' ${specfile}
sed -i 's|^%newchangelog|%changelog|' ${specfile}
cp ${specfile} ${BUILDDIR}/
# Prepare source tarball
pushd $_srcpath &>/dev/null
git archive --format tar --worktree-attributes HEAD > ${BUILDDIR}/${PACKAGENAME}.tar
git rev-parse HEAD > ${BUILDDIR}/version.txt
pushd $BUILDDIR &>/dev/null
tar -rf ${PACKAGENAME}.tar version.txt
gzip -9 ${PACKAGENAME}.tar
mv ${PACKAGENAME}.tar.gz ${PACKAGENAME}-${version}.tar.gz
[ -f version.txt ] && rm -f version.txt
popd &>/dev/null
popd &>/dev/null
# Build stage
local REQUEST=$REQUEST_NUM
[ -n "$LP_BUG" ] && REQUEST=$LP_BUG
RPM_HOTFIX_REPO_PATH=${RPM_HOTFIX_REPO_PATH:-${RPM_OS_REPO_PATH%/*}/hotfix}
[ -n "${EXTRAREPO}" ] && EXTRAREPO="${EXTRAREPO}|"
EXTRAREPO="${EXTRAREPO}repo1,http://${REMOTE_REPO_HOST}/${RPM_OS_REPO_PATH}/x86_64"
case true in
"$IS_HOTFIX" )
EXTRAREPO="${EXTRAREPO}|repo2,http://${REMOTE_REPO_HOST}/${RPM_HOTFIX_REPO_PATH}/x86_64"
EXTRAREPO="${EXTRAREPO}|repo3,http://${REMOTE_REPO_HOST}/${RPM_UPDATES_REPO_PATH}/x86_64"
EXTRAREPO="${EXTRAREPO}|repo4,http://${REMOTE_REPO_HOST}/${RPM_SECURITY_REPO_PATH}/x86_64"
;;
"$IS_SECURITY" )
EXTRAREPO="${EXTRAREPO}|repo2,http://${REMOTE_REPO_HOST}/${RPM_UPDATES_REPO_PATH}/x86_64"
EXTRAREPO="${EXTRAREPO}|repo3,http://${REMOTE_REPO_HOST}/${RPM_SECURITY_REPO_PATH}/x86_64"
;;
"$IS_UPDATES" )
EXTRAREPO="${EXTRAREPO}|repo2,http://${REMOTE_REPO_HOST}/${RPM_PROPOSED_REPO_PATH}/x86_64"
EXTRAREPO="${EXTRAREPO}|repo3,http://${REMOTE_REPO_HOST}/${RPM_UPDATES_REPO_PATH}/x86_64"
EXTRAREPO="${EXTRAREPO}|repo4,http://${REMOTE_REPO_HOST}/${RPM_SECURITY_REPO_PATH}/x86_64"
;;
esac
if [ "$GERRIT_CHANGE_STATUS" == "NEW" ] && [ -n "$LP_BUG" -o -n "$CUSTOM_REPO_ID" ] ; then
local REMOTE_REQUEST_REPO_HOST=${REMOTE_REQUEST_REPO_HOST:-$REMOTE_REPO_HOST}
local RPM_REQUEST_HOTFIX_REPO_PATH=${RPM_REQUEST_HOTFIX_REPO_PATH:-$RPM_HOTFIX_REPO_PATH}
local RPM_REQUEST_SECURITY_REPO_PATH=${RPM_REQUEST_SECURITY_REPO_PATH:-$RPM_SECURITY_REPO_PATH}
local RPM_REQUEST_PROPOSED_REPO_PATH=${RPM_REQUEST_PROPOSED_REPO_PATH:-$RPM_PROPOSED_REPO_PATH}
local RPM_REQUEST_OS_REPO_PATH=${RPM_REQUEST_OS_REPO_PATH:-$RPM_OS_REPO_PATH}
case true in
"$IS_HOTFIX" )
local RPM_REQUEST_REPO_PATH=$RPM_REQUEST_HOTFIX_REPO_PATH
;;
"$IS_SECURITY" )
local RPM_REQUEST_REPO_PATH=$RPM_REQUEST_SECURITY_REPO_PATH
;;
"$IS_UPDATES" )
local RPM_REQUEST_REPO_PATH=$RPM_REQUEST_PROPOSED_REPO_PATH
;;
* )
local RPM_REQUEST_REPO_PATH=$RPM_REQUEST_OS_REPO_PATH
;;
esac
EXTRAREPO="${EXTRAREPO}|repo5,http://${REMOTE_REQUEST_REPO_HOST}/${REPO_REQUEST_PATH_PREFIX}/${REQUEST}/${RPM_REQUEST_REPO_PATH}/x86_64"
fi
export EXTRAREPO
if [ -n "$EXTRAREPO" ] ; then
local EXTRAPARAMS=""
local OLDIFS="$IFS"
IFS='|'
for repo in $EXTRAREPO; do
IFS="$OLDIFS"
[ -n "$repo" ] && EXTRAPARAMS="${EXTRAPARAMS} --repository ${repo#*,}"
IFS='|'
done
IFS="$OLDIFS"
fi
local tmpdir=$(mktemp -d ${PKG_DIR}/build-XXXXXXXX)
echo "BUILD_SUCCEEDED=false" > ${WRKDIR}/buildresult.params
bash -c "${WRKDIR}/build \
--verbose \
--no-keep-chroot \
--dist ${DIST} \
--build \
--source $BUILDDIR \
--output $tmpdir \
${EXTRAPARAMS}"
local exitstatus=$(cat ${tmpdir}/exitstatus || echo 1)
[ -f "${tmpdir}/build.log" ] && mv "${tmpdir}/build.log" "${WRKDIR}/buildlog.txt"
[ -f "${tmpdir}/root.log" ] && mv "${tmpdir}/root.log" "${WRKDIR}/rootlog.txt"
fill_buildresult $exitstatus 0 $PACKAGENAME RPM
if [ "$exitstatus" == "0" ] && [ -n "${GERRIT_BRANCH}" ]; then
rm -f ${WRKDIR}/buildresult.params
cat >${WRKDIR}/buildresult.params<<-EOL
BUILD_HOST=`hostname -f`
PKG_PATH=$tmpdir
GERRIT_CHANGE_STATUS=$GERRIT_CHANGE_STATUS
REQUEST_NUM=$REQUEST_NUM
LP_BUG=$LP_BUG
IS_HOTFIX=$IS_HOTFIX
IS_SECURITY=$IS_SECURITY
EXTRAREPO="$EXTRAREPO"
REPO_TYPE=rpm
DIST=$DIST
EOL
# Fill yaml file
local srpmfile=$(find ${tmpdir}/ -name *.src.rpm)
local srcpackagename=$(rpm -qp $srpmfile --queryformat %{NAME}"\n" | head -1)
local newrelease=$(rpm -qp $srpmfile --queryformat %{RELEASE}"\n" | head -1)
local yaml_report_file=${tmpdir}/${srcpackagename}.yaml
echo "Source: ${srcpackagename}" > $yaml_report_file
echo "Version: ${version}-${newrelease}" >> $yaml_report_file
echo "Binary:" >> $yaml_report_file
for binary in $(find ${tmpdir}/ -name *.rpm | egrep -v '\.src\.rpm$') ; do
local binary_name=$(rpm -qp $binary --queryformat %{NAME}"\n" | head -1)
echo " - ${binary_name}" >> $yaml_report_file
done
echo "Build_time: $(date '+%F-%H-%M-%S')" >> $yaml_report_file
echo "Code_project:" >> $yaml_report_file
echo " ${gitsrcprj}: ${gitsrcsha}" >> $yaml_report_file
fi
echo "Packages: $PACKAGENAME"
exit $exitstatus
}
main "$@"
exit 0


@@ -1,297 +0,0 @@
#!/bin/bash
[ -z "$GERRIT_USER" ] && GERRIT_USER='openstack-ci-jenkins'
[ -z "$GERRIT_HOST" ] && GERRIT_HOST=$gerrit_host
[ -z "$GERRIT_PORT" ] && GERRIT_PORT=$gerrit_port
[ -z "$GERRIT_PORT" ] && GERRIT_PORT=29418
[ -z "$GERRIT_SCHEME" ] && GERRIT_SCHEME="ssh"
URL="${GERRIT_SCHEME}://${GERRIT_USER}@${GERRIT_HOST}:${GERRIT_PORT}"
GITDATA=${HOME}/gitdata/$GERRIT_HOST
METADATA=${HOME}/repometadata
PKG_DIR=${HOME}/built_packages
EXCLUDES='--exclude-vcs'
WRKDIR=`pwd`
MYOUTDIR=${WRKDIR}/wrk-build
BUILDDIR=${MYOUTDIR}/src-to-build
rm -rf $BUILDDIR
mkdir -p $BUILDDIR
[ ! -d "$PKG_DIR" ] && mkdir -p $PKG_DIR
[ -f "${WRKDIR}/buildlog.txt" ] && rm -f ${WRKDIR}/buildlog.txt
error () {
echo
echo -e "ERROR: $*"
echo
exit 1
}
info () {
echo
echo -e "INFO: $*"
echo
}
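# job_lock <lockfile> set|unset -- serialize access to a shared resource by
# flock-ing the given lock file; "set" waits up to TIMEOUT seconds for an
# exclusive lock, "unset" releases it (used below around the git cache dirs).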
job_lock() {
local LOCKFILE=$1
local TIMEOUT=600
shift
fd=15
eval "exec $fd>$LOCKFILE"
if [ "$1" = "set" ]; then
flock --timeout $TIMEOUT -x $fd
elif [ "$1" = "unset" ]; then
flock -u $fd
fi
}
request_is_merged () {
local REF=$1
local CHANGENUMBER=`echo $REF | cut -d '/' -f4`
local result=1
local status=`ssh ${GERRIT_USER}@${GERRIT_HOST} -p $GERRIT_PORT gerrit query --format=TEXT $CHANGENUMBER | egrep -o " +status:.*" | awk -F': ' '{print $2}'`
[ "$status" == "MERGED" ] && local result=0
return $result
}
set_default_params () {
[ -z "$PROJECT_NAME" ] && error "Project name is not defined! Exiting!"
[ -z "$PROJECT_VERSION" ] && error "Project version is not defined! Exiting!"
[ "$IS_HOTFIX" == "true" -a "$IS_UPDATES" == "false" ] && error "Hotfix update before release"
if [ -n "$GERRIT_PROJECT" ]; then
if [ -z "$GERRIT_CHANGE_STATUS" ] ; then
# Detect change status
GERRIT_CHANGE_STATUS="NEW"
if [ -n "$GERRIT_REFSPEC" ]; then
request_is_merged $GERRIT_REFSPEC && GERRIT_CHANGE_STATUS="MERGED"
else
# Support ref-updated gerrit event
GERRIT_CHANGE_STATUS="REF_UPDATED"
GERRIT_BRANCH=$GERRIT_REFNAME
fi
fi
if [ -n "$GERRIT_CHANGE_COMMIT_MESSAGE" ] ; then
local GERRIT_MESSAGE="`echo $GERRIT_CHANGE_COMMIT_MESSAGE | base64 -d || :`"
fi
if [ "$GERRIT_CHANGE_STATUS" == "NEW" ] ; then
REQUEST_NUM="CR-$GERRIT_CHANGE_NUMBER"
local _LP_BUG=`echo "$GERRIT_TOPIC" | egrep -o "group/[0-9]+" | cut -d'/' -f2`
#[ -z "$_LP_BUG" ] && _LP_BUG=`echo "$GERRIT_MESSAGE" | egrep -i -o "(closes|partial|related)-bug: ?#?[0-9]+" | sort -u | head -1 | awk -F'[: #]' '{print $NF}'`
[ -n "$_LP_BUG" ] && LP_BUG="LP-$_LP_BUG"
if [ -n "${CUSTOM_REPO_ID}" ] ; then
unset LP_BUG
REQUEST_NUM=${CUSTOM_REPO_ID}
fi
fi
# Detect packagename
PACKAGENAME=${GERRIT_PROJECT##*/}
[ "${PACKAGENAME##*-}" == "build" ] && PACKAGENAME=${PACKAGENAME%-*}
SRC_PROJECT=${SRC_PROJECT_PATH}/$PACKAGENAME
[ "$IS_OPENSTACK" == "true" ] && SPEC_PROJECT=${SPEC_PROJECT_PATH}/${PACKAGENAME}${SPEC_PROJECT_SUFFIX}
case $GERRIT_PROJECT in
"$SRC_PROJECT" ) SOURCE_REFSPEC=$GERRIT_REFSPEC ;;
"$SPEC_PROJECT" ) SPEC_REFSPEC=$GERRIT_REFSPEC ;;
esac
SOURCE_BRANCH=$GERRIT_BRANCH
[ "$IS_OPENSTACK" == "true" ] && SPEC_BRANCH=$GERRIT_BRANCH
fi
[ -z "$PACKAGENAME" ] && error "Package name is not defined! Exiting!"
[ -z "$SOURCE_BRANCH" ] && error "Source branch is not defined! Exiting!"
[ "$IS_OPENSTACK" == "true" ] && [ -z "$SPEC_BRANCH" ] && SPEC_BRANCH=$SOURCE_BRANCH
[ "$IS_OPENSTACK" == "true" ] && SPEC_PROJECT=${SPEC_PROJECT_PATH}/${PACKAGENAME}${SPEC_PROJECT_SUFFIX}
SRC_PROJECT=${SRC_PROJECT_PATH}/$PACKAGENAME
}
fetch_upstream () {
# find corresponding requests
if [ -n "$SPEC_PROJECT" -a "${GERRIT_TOPIC%/*}" = "spec" ] ; then
local CORR_GERRIT_PROJECT=$SRC_PROJECT
[ "$GERRIT_PROJECT" == "$SRC_PROJECT" ] && CORR_GERRIT_PROJECT=$SPEC_PROJECT
local search_string="topic:${GERRIT_TOPIC} branch:${GERRIT_BRANCH} project:${CORR_GERRIT_PROJECT} -status:abandoned"
local CORR_CHANGE=`ssh -p $GERRIT_PORT ${GERRIT_USER}@$GERRIT_HOST gerrit query --current-patch-set \'${search_string}\'`
local CORR_CHANGE_REFSPEC="`echo \"${CORR_CHANGE}\" | grep 'ref:' | awk '{print $NF}'`"
local CORR_CHANGE_NUMBER=`echo $CORR_CHANGE_REFSPEC | cut -d'/' -f4`
local CORR_PATCHSET_NUMBER=`echo $CORR_CHANGE_REFSPEC | cut -d'/' -f5`
local CORR_CHANGE_URL=`echo "${CORR_CHANGE}" | grep 'url:' | awk '{print $NF}'`
local CORR_CHANGE_STATUS=`echo "${CORR_CHANGE}" | grep 'status:' | awk '{print $NF}'`
local corr_ref_count=`echo "$CORR_CHANGE_REFSPEC" | wc -l`
[ $corr_ref_count -gt 1 ] && error "Multiple corresponding changes found!"
if [ -n "$CORR_CHANGE_NUMBER" ] ; then
# Provide corresponding change to vote script
cat > ${WRKDIR}/corr.setenvfile <<-EOL
CORR_CHANGE_NUMBER=$CORR_CHANGE_NUMBER
CORR_PATCHSET_NUMBER=$CORR_PATCHSET_NUMBER
CORR_CHANGE_URL=$CORR_CHANGE_URL
CORR_CHANGE_REFSPEC=$CORR_CHANGE_REFSPEC
EOL
fi
# Do not perform build stage if corresponding CR is not merged
if [ -n "${CORR_CHANGE_STATUS}" ] && [ "$GERRIT_CHANGE_STATUS" == "MERGED" ] && [ "$CORR_CHANGE_STATUS" != "MERGED" ] ; then
echo "SKIPPED=1" >> ${WRKDIR}/corr.setenvfile
error "Skipping build due to unmerged status of corresponding change ${CORR_CHANGE_URL}"
fi
fi
# Do not clone projects every time. It makes gerrit sad. Cache it!
for prj in $SRC_PROJECT $SPEC_PROJECT; do
# Update code base cache
[ -d ${GITDATA} ] || mkdir -p ${GITDATA}
if [ ! -d ${GITDATA}/$prj ]; then
info "Cache for $prj doesn't exist. Cloning to ${HOME}/gitdata/$prj"
mkdir -p ${GITDATA}/$prj
# Lock cache directory
job_lock ${GITDATA}/${prj}.lock set
pushd ${GITDATA} &>/dev/null
info "Cloning sources from $URL/$prj.git ..."
git clone "$URL/$prj.git" "$prj"
popd &>/dev/null
else
# Lock cache directory
job_lock ${GITDATA}/${prj}.lock set
info "Updating cache for $prj"
pushd ${GITDATA}/$prj &>/dev/null
info "Fetching sources from $URL/$prj.git ..."
# Replace git remote user
local remote=`git remote -v | head -1 | awk '{print $2}' | sed "s|//.*@|//${GERRIT_USER}@|"`
git remote rm origin
git remote add origin $remote
# Update gitdata
git fetch --all
popd &>/dev/null
fi
if [ "$prj" == "$SRC_PROJECT" ]; then
local _DIRSUFFIX=src
local _BRANCH=$SOURCE_BRANCH
[ -n "$SOURCE_REFSPEC" ] && local _REFSPEC=$SOURCE_REFSPEC
fi
if [ "$prj" == "$SPEC_PROJECT" ]; then
local _DIRSUFFIX=spec
local _BRANCH=$SPEC_BRANCH
[ -n "$SPEC_REFSPEC" ] && local _REFSPEC=$SPEC_REFSPEC
fi
[ -e "${MYOUTDIR}/${PACKAGENAME}-${_DIRSUFFIX}" ] && rm -rf "${MYOUTDIR}/${PACKAGENAME}-${_DIRSUFFIX}"
info "Getting $_DIRSUFFIX from $URL/$prj.git ..."
cp -R ${GITDATA}/${prj} ${MYOUTDIR}/${PACKAGENAME}-${_DIRSUFFIX}
# Unlock cache directory
job_lock ${GITDATA}/${prj}.lock unset
pushd ${MYOUTDIR}/${PACKAGENAME}-${_DIRSUFFIX} &>/dev/null
switch_to_revision $_BRANCH
# Get code from HEAD if change is merged
[ "$GERRIT_CHANGE_STATUS" == "MERGED" ] && unset _REFSPEC
# If _REFSPEC specified switch to it
if [ -n "$_REFSPEC" ] ; then
switch_to_changeset $prj $_REFSPEC
else
[ "$prj" == "${CORR_GERRIT_PROJECT}" ] && [ -n "${CORR_CHANGE_REFSPEC}" ] && switch_to_changeset $prj $CORR_CHANGE_REFSPEC
fi
popd &>/dev/null
case $_DIRSUFFIX in
src) gitshasrc=$gitsha
;;
spec) gitshaspec=$gitsha
;;
*) error "Unknown project type"
;;
esac
unset _DIRSUFFIX
unset _BRANCH
unset _REFSPEC
done
}
switch_to_revision () {
info "Switching to branch $*"
if ! git checkout $*; then
error "$* not accessible by default clone/fetch"
else
git reset --hard origin/$*
gitsha=`git log -1 --pretty="%h"`
fi
}
switch_to_changeset () {
info "Switching to changeset $2"
git fetch "$URL/$1.git" $2
git checkout FETCH_HEAD
gitsha=`git log -1 --pretty="%h"`
}
get_last_commit_info () {
if [ -n "$1" ] ; then
pushd $1 &>/dev/null
message="$(git log -n 1 --pretty=format:%B)"
author=$(git log -n 1 --pretty=format:%an)
email=$(git log -n 1 --pretty=format:%ae)
cdate=$(git log -n 1 --pretty=format:%ad | cut -d' ' -f1-3,5)
commitsha=$(git log -n 1 --pretty=format:%h)
lastgitlog=$(git log --pretty="%h|%ae|%an|%s" -n 10)
popd &>/dev/null
fi
}
get_extra_revision () {
local type=$1
local _srcpath=$2
[ -n "$3" ] && local release_tag=$3
case "$type" in
security)
local _prefix="0."
unset _suffix
;;
hotfix)
unset _prefix
local _suffix=".0"
;;
esac
# hotfix branch name for openstack projects should be like
# "{stable_branch_name}-hotfix-<id>"
# security branch name for openstack projects should be like
# "{stable_branch_name}-security-<id>"
# Get parent branch
local _parent_branch=$(echo "$SOURCE_BRANCH" | sed -r "s|-${type}-.*$||")
[ $(git -C "$_srcpath" branch -a | fgrep -c "origin/$_parent_branch") -eq 0 ] && error "Can't find parent source branch"
# Get common ancestor
local _merge_base=$(git -C "$_srcpath" merge-base "origin/$_parent_branch" "origin/$SOURCE_BRANCH")
# Calculate ancestor revision
if [ -n "$release_tag" ] ; then
local _base_rev=$(git -C "$_srcpath" rev-list --no-merges "$release_tag".."$_merge_base" | wc -l)
else
local _base_rev=$(git -C "$_srcpath" rev-list --no-merges "$_merge_base" | wc -l)
fi
# Calculate delta revision
local _delta_rev=$(( $_rev - $_base_rev ))
local _rev=${_base_rev}.${_prefix}${_delta_rev}${_suffix}
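# Illustrative example: with 12 commits between the release tag and the
# merge-base with the parent branch, and 3 more commits on the
# hotfix/security branch itself, this yields 12.3.0 for a hotfix build
# and 12.0.3 for a security build.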
echo "$_rev"
}
fill_buildresult () {
#$status $time $PACKAGENAME $pkgtype
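# Illustrative output for a successful DEB build of a package named "foo"
# (the package name is a placeholder):
#   <testsuite name="Package build" tests="Package build" errors="0" failures="0" skip="0">
#   <testcase classname="DEB" name="foo" time="0"/>
#   </testsuite>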
local status=$1
local time=$2
local packagename=$3
local pkgtype=$4
local xmlfilename=${WRKDIR}/buildresult.xml
local failcnt=0
local buildstat="Succeeded"
[ "$status" != "0" ] && failcnt=1 && buildstat="Failed"
echo "<testsuite name=\"Package build\" tests=\"Package build\" errors=\"0\" failures=\"$failcnt\" skip=\"0\">" > $xmlfilename
echo -n "<testcase classname=\"$pkgtype\" name=\"$packagename\" time=\"0\"" >> $xmlfilename
if [ "$failcnt" == "0" ] ; then
echo "/>" >> $xmlfilename
else
echo ">" >> $xmlfilename
echo "<failure type=\"Failure\" message=\"$buildstat\">" >> $xmlfilename
if [ -f "${WRKDIR}/buildlog.txt" ] ; then
cat ${WRKDIR}/buildlog.txt | sed -n '/^dpkg: error/,/^Package installation failed/p' | egrep -v '^Get|Selecting|Unpacking|Preparing' >> $xmlfilename || :
cat ${WRKDIR}/buildlog.txt | sed -n '/^The following information may help to resolve the situation/,/^Package installation failed/p' >> $xmlfilename || :
cat ${WRKDIR}/buildlog.txt | grep -B 20 '^dpkg-buildpackage: error' >> $xmlfilename || :
cat ${WRKDIR}/buildlog.txt | grep -B 20 '^EXCEPTION:' >> $xmlfilename || :
fi
if [ -f "${WRKDIR}/rootlog.txt" ] ; then
cat ${WRKDIR}/rootlog.txt | sed -n '/No Package found/,/Exception/p' >> $xmlfilename || :
cat ${WRKDIR}/rootlog.txt | sed -n '/Error: /,/You could try using --skip-broken to work around the problem/p' >> $xmlfilename || :
fi
echo "</failure>" >> $xmlfilename
echo "</testcase>" >> $xmlfilename
fi
echo "</testsuite>" >> $xmlfilename
}


@@ -1,167 +0,0 @@
#!/bin/bash -ex
usage() {
cat <<EOF
Usage: $(basename "$0") [options]
If NO parameters are specified, this script will:
- search for sources in the local directory
- put built packages to ./buildresult
- use the preconfigured upstream mirror (http://mirror.yandex.ru/ubuntu)
Mandatory arguments to long options are mandatory for short options too.
-h, --help display this help and exit
-b, --build-target distname (currently "trusty" and "centos7" are supported)
-s, --source sources directory
-u, --upstream-repo upstream mirror (default is mirror.yandex.ru/ubuntu)
-e, --ext-repos additional mirrors
-o, --output-dir output directory
Please use the following syntax to add additional repositories:
rpm:
"name1,http://url/to/repo1|name2,http://url/to/repo2"
deb:
"http://url/to/repo1 distro component1 component2|http://url/to/repo2 distro component3 component4"
IMPORTANT:
Sources should be prepared by the maintainer before the build:
rpm:
- srpm file:
./python-amqp-1.4.5-2.mira1.src.rpm
- file tree with .spec file and source tarball:
./python-pbr-0.10.0.tar.gz
./some-patch.patch
./python-pbr.spec
deb:
- packed sources (.dsc, .*z , .diff files):
./websocket-client_0.12.0-ubuntu1.debian.tar.gz
./websocket-client_0.12.0-ubuntu1.dsc
./websocket-client_0.12.0.orig.tar.gz
- file tree with pristine source tarball in the root of tree and debian folder inside some parent folder:
./python-pbr/debian/*
./python-pbr_0.10.0.orig.tar.gz
EOF
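# Illustrative invocation (the source path and mirror URL are placeholders):
#   $0 --build-target trusty --source ./package-src \
#      --ext-repos "http://mirror.example.org/ubuntu trusty main" \
#      --output-dir ./buildresult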
}
usage_short() {
echo Usage: $(basename "$0") [options]
echo
echo -e Try $(basename "$0") --help for more options.
}
die() { echo "$@" 1>&2 ; exit 1; }
OPTS=$(getopt -o b:s:e:o:u:h -l build-target:,source:,ext-repos:,output-dir:,upstream-repo:,help -- "$@")
if [ $? != 0 ]; then
usage_short
exit 1
fi
eval set -- "$OPTS"
WORKING_DIR=${0%/*}
while true ; do
case "$1" in
-h| --help ) usage ; exit 0;;
-b | --build-target ) BUILD_TARGET="$2"; shift; shift;;
-s | --source ) BUILD_SOURCE="$2"; shift; shift;;
-e | --ext-repos ) EXTRAREPO="$2"; export EXTRAREPO; shift; shift;;
-o | --output-dir ) OUTPUT_DIR="$2"; shift; shift;;
-u | --upstream-repo ) UPSTREAM_MIRROR="$2"; export UPSTREAM_MIRROR; shift; shift;;
-- ) shift; break;;
* ) break;;
esac
done
if [[ ${BUILD_SOURCE} = "" ]]; then
BUILD_SOURCE=${PWD}
fi
build_docker_image() {
case "$BUILD_TARGET" in
centos7)
docker build -t mockbuild "${WORKING_DIR}"/docker-builder/mockbuild/
;;
trusty)
docker build -t sbuild "${WORKING_DIR}"/docker-builder/sbuild/
;;
esac
}
create_buildroot() {
case "$BUILD_TARGET" in
centos7)
"${WORKING_DIR}"/docker-builder/create-rpm-chroot.sh
;;
trusty)
"${WORKING_DIR}"/docker-builder/create-deb-chroot.sh
;;
*) die "Unknown build target specified. Currently 'trusty' and 'centos7' are supported"
esac
}
update_buildroot() {
case "$BUILD_TARGET" in
centos7)
"${WORKING_DIR}"/docker-builder/update-rpm-chroot.sh
;;
trusty)
"${WORKING_DIR}"/docker-builder/update-deb-chroot.sh
;;
*) die "Unknown build target specified. Currently 'trusty' and 'centos7' are supported"
esac
}
main () {
case "$BUILD_TARGET" in
trusty)
export DIST="${BUILD_TARGET}"
if [[ "$(docker images -q sbuild 2> /dev/null)" == "" ]]; then
build_docker_image
create_buildroot
else
if [[ ! -d /var/cache/docker-builder/sbuild/"${BUILD_TARGET}"-amd64 ]]; then
create_buildroot
else
update_buildroot
fi
fi
cd "${BUILD_SOURCE}"
bash -ex "${WORKING_DIR}"/docker-builder/build-deb-package.sh
local exitstatus=`cat buildresult/exitstatus.sbuild || echo 1`
if [[ "${OUTPUT_DIR}" != "" ]]; then
mkdir -p "${OUTPUT_DIR}"
mv buildresult/* "${OUTPUT_DIR}"
rm -rf buildresult
fi
;;
centos7)
export DIST="${BUILD_TARGET}"
if [[ "$(docker images -q mockbuild 2> /dev/null)" == "" ]]; then
build_docker_image
create_buildroot
else
if [[ ! -d /var/cache/docker-builder/mock/cache/epel-7-x86_64 ]]; then
create_buildroot
else
update_buildroot
fi
fi
cd "${BUILD_SOURCE}"
bash -ex "${WORKING_DIR}"/docker-builder/build-rpm-package.sh
local exitstatus=`cat build/exitstatus.mock || echo 1`
if [[ "${OUTPUT_DIR}" != "" ]]; then
mkdir -p "${OUTPUT_DIR}"
mv build/* "${OUTPUT_DIR}"
rm -rf build
fi
;;
*) die "Unknown build target specified. Currently 'trusty' and 'centos7' are supported"
esac
exit "${exitstatus}"
}
main "${@}"


@@ -1,256 +0,0 @@
#!/bin/bash
set -o xtrace
set -o errexit
[ -f ".packages-defaults" ] && source .packages-defaults
BINDIR=$(dirname `readlink -e $0`)
source "${BINDIR}"/build-functions.sh
main () {
set_default_params
# Get package tree from gerrit
fetch_upstream
local _srcpath="${MYOUTDIR}/${PACKAGENAME}-src"
local _specpath=$_srcpath
local _testspath=$_srcpath
[ "$IS_OPENSTACK" == "true" ] && _specpath="${MYOUTDIR}/${PACKAGENAME}-spec${SPEC_PREFIX_PATH}" && _testspath="${MYOUTDIR}/${PACKAGENAME}-spec"
# Get last commit info
# $message $author $email $cdate $commitsha $lastgitlog
get_last_commit_info ${_srcpath}
local gitsrcsha=$(git -C ${_srcpath} log -n 1 --pretty=format:%H)
local gitsrcprj=$(git -C ${_srcpath} remote -v | head -n 1 | awk '{print $2}' | awk -F '/' '{print $NF}' | sed 's|.git$||' )
# Update specs
local specfile=`find $_specpath -name *.spec`
#local binpackagename=`rpm -q $RPMQUERYPARAMS --specfile $specfile --queryformat %{NAME}"\n" | head -1`
local define_macros=(
-D 'kernel_module_package_buildreqs kernel-devel'
-D 'kernel_module_package(n:v:r:s:f:xp:) \
%package -n kmod-%{-n*} \
Summary: %{-n*} kernel module(s) \
Version: %{version} \
Release: %{release} \
%description -n kmod-%{-n*} \
This package provides the %{-n*} kernel modules
' )
local version=`rpm -q "${define_macros[@]}" --specfile $specfile --queryformat %{VERSION}"\n" | head -1`
pkg_version="${version}"
local release=`rpm -q "${define_macros[@]}" --specfile $specfile --queryformat %{RELEASE}"\n" | head -1`
## Add changelog section if it doesn't exist
[ "`cat ${specfile} | grep -c '^%changelog'`" -eq 0 ] && echo "%changelog" >> ${specfile}
if [ "$IS_OPENSTACK" == "true" ] ; then
local gitspecsha=$(git -C ${_specpath} log -n 1 --pretty=format:%H)
local gitspecprj=$(git -C ${_specpath} remote -v | head -n 1 | awk '{print $2}' | awk -F '/' '{print $NF}' | sed 's|.git$||' )
# Get version number from the latest git tag for openstack packages
local release_tag=$(git -C $_srcpath describe --abbrev=0 --candidates=1 --match "*[0-9]*" | sed -r 's|^[^0-9]+||')
# Deal with PyPi versions like 2015.1.0rc1
# It breaks version comparison
# Change it to 2015.1.0~rc1
local script_dir=$(dirname $(readlink -e $0))
local convert_version_py="$script_dir/convert_version.py"
if grep -qE "^${SRC_PROJECT}\$" "$script_dir/fuel-projects-list"
then
local version_length=2
fi
version=$(python ${convert_version_py} --tag ${release_tag} \
${version_length:+ -l $version_length})
if [ "${version}" != "${pkg_version}" ] ; then
echo -e "ERROR: Version mismatch. Latest version from Gerrit tag: $version, and from changelog: $pkg_version. Build aborted."
exit 1
fi
# Get revision number as commit count for src+spec projects
local _rev=$(git -C $_srcpath rev-list --no-merges ${release_tag}..origin/${SOURCE_BRANCH} | wc -l)
[ "$GERRIT_CHANGE_STATUS" == "NEW" ] \
&& [ ${GERRIT_PROJECT} == "${SRC_PROJECT}" ] \
&& _rev=$(( $_rev + 1 ))
[ "$IS_HOTFIX" == "true" ] \
&& _rev=$(get_extra_revision hotfix ${_srcpath} ${release_tag})
[ "$IS_SECURITY" == "true" ] \
&& _rev=$(get_extra_revision security ${_srcpath} ${release_tag})
local release="mos${_rev}"
local TAR_NAME=${PACKAGENAME}-${version}.tar.gz
# Update version and changelog
sed -i "s|Version:.*$|Version: ${version}|" $specfile
sed -i "/Release/s|%{?dist}.*$|%{?dist}~${release}|" $specfile
sed -i "s|Source0:.*$|Source0: ${TAR_NAME}|" $specfile
# Prepare source tarball
pushd $_srcpath &>/dev/null
local ignore_list="openstack-macros horizon-vendor-theme fuel-astute fuel-library fuel-main fuel-nailgun-agent fuel-ui fuel-web fuel-agent"
if [ $(echo $ignore_list | grep -Eo "(^| )$PACKAGENAME( |$)") ]; then
# Do not perform `setup.py sdist` for openstack-macros package
tar -czf ${BUILDDIR}/$TAR_NAME $EXCLUDES .
else
# Use virtualenv to deal with different pbr requirements
local venv=$(mktemp -d)
virtualenv "$venv"
source "${venv}/bin/activate"
pip install --upgrade setuptools
python setup.py --version # this will download pbr if it's not available
PBR_VERSION=$release_tag python setup.py sdist -d ${BUILDDIR}/
deactivate
[ -d "$venv" ] && rm -rf "$venv"
# Fix source folder name at sdist tarball
local sdist_tarball=$(find ${BUILDDIR}/ -maxdepth 1 -name "*.gz")
if [ "$(tar -tf $sdist_tarball | head -n 1 | cut -d'/' -f1)" != "${PACKAGENAME}-${version}" ] ; then
# rename source folder
local tempdir=$(mktemp -d)
tar -C $tempdir -xf $sdist_tarball
mv $tempdir/* $tempdir/${PACKAGENAME}-${version}
tar -C $tempdir -czf ${BUILDDIR}/$TAR_NAME ${PACKAGENAME}-${version}
rm -f $sdist_tarball
[ -d "$tempdir" ] && rm -rf $tempdir
else
mv $sdist_tarball ${BUILDDIR}/$TAR_NAME || :
fi
fi
cp $_specpath/rpm/SOURCES/* ${BUILDDIR}/ &>/dev/null || :
else
# TODO: Support unpacked source tree
# Packed sources (.spec + .gz + stuff)
# Exclude tests folder
cp -R ${_srcpath}/* $BUILDDIR
[ -d "${BUILDDIR}/tests" ] && rm -rf ${BUILDDIR}/tests
fi
## Update changelog
firstline=1
if [ ! -z "$lastgitlog" ]; then
sed -i "/^%changelog/i%newchangelog" ${specfile}
echo "$lastgitlog" | while read LINE; do
commitid=`echo "$LINE" | cut -d'|' -f1`
email=`echo "$LINE" | cut -d'|' -f2`
author=`echo "$LINE" | cut -d'|' -f3`
# Get current date to avoid wrong chronological order in %changelog section
date=`LC_TIME=C date +"%a %b %d %Y"`
subject=`echo "$LINE" | cut -d'|' -f4`
[ $firstline == 1 ] && sed -i "/^%changelog/i\* $date $author \<${email}\> \- ${version}-${release}" ${specfile}
sed -i "/^%changelog/i\- $commitid $subject" ${specfile}
firstline=0
done
sed -i '/^%changelog/i\\' ${specfile}
sed -i '/^%changelog/d' ${specfile}
sed -i 's|^%newchangelog|%changelog|' ${specfile}
fi
echo "Resulting spec-file:"
cat ${specfile}
cp ${specfile} ${BUILDDIR}/
# Prepare tests folder to provide as parameter
rm -f ${WRKDIR}/tests.envfile
[ -d "${_testspath}/tests" ] && echo "TESTS_CONTENT='`tar -cz -C ${_testspath} tests | base64 -w0`'" > ${WRKDIR}/tests.envfile
# Build stage
local REQUEST=$REQUEST_NUM
[ -n "$LP_BUG" ] && REQUEST=$LP_BUG
RPM_HOTFIX_REPO_PATH=${RPM_HOTFIX_REPO_PATH:-${RPM_OS_REPO_PATH%/*}/hotfix}
[ -n "${EXTRAREPO}" ] && EXTRAREPO="${EXTRAREPO}|"
EXTRAREPO="${EXTRAREPO}repo1,http://${REMOTE_REPO_HOST}/${RPM_OS_REPO_PATH}/x86_64"
case true in
"$IS_HOTFIX" )
EXTRAREPO="${EXTRAREPO}|repo2,http://${REMOTE_REPO_HOST}/${RPM_HOTFIX_REPO_PATH}/x86_64"
EXTRAREPO="${EXTRAREPO}|repo3,http://${REMOTE_REPO_HOST}/${RPM_UPDATES_REPO_PATH}/x86_64"
EXTRAREPO="${EXTRAREPO}|repo4,http://${REMOTE_REPO_HOST}/${RPM_SECURITY_REPO_PATH}/x86_64"
;;
"$IS_SECURITY" )
EXTRAREPO="${EXTRAREPO}|repo2,http://${REMOTE_REPO_HOST}/${RPM_UPDATES_REPO_PATH}/x86_64"
EXTRAREPO="${EXTRAREPO}|repo3,http://${REMOTE_REPO_HOST}/${RPM_SECURITY_REPO_PATH}/x86_64"
;;
"$IS_UPDATES" )
EXTRAREPO="${EXTRAREPO}|repo2,http://${REMOTE_REPO_HOST}/${RPM_PROPOSED_REPO_PATH}/x86_64"
EXTRAREPO="${EXTRAREPO}|repo3,http://${REMOTE_REPO_HOST}/${RPM_UPDATES_REPO_PATH}/x86_64"
EXTRAREPO="${EXTRAREPO}|repo4,http://${REMOTE_REPO_HOST}/${RPM_SECURITY_REPO_PATH}/x86_64"
;;
esac
if [ "$GERRIT_CHANGE_STATUS" == "NEW" ] && [ -n "$LP_BUG" -o -n "$CUSTOM_REPO_ID" ] ; then
local REMOTE_REQUEST_REPO_HOST=${REMOTE_REQUEST_REPO_HOST:-$REMOTE_REPO_HOST}
local RPM_REQUEST_HOTFIX_REPO_PATH=${RPM_REQUEST_HOTFIX_REPO_PATH:-$RPM_HOTFIX_REPO_PATH}
local RPM_REQUEST_SECURITY_REPO_PATH=${RPM_REQUEST_SECURITY_REPO_PATH:-$RPM_SECURITY_REPO_PATH}
local RPM_REQUEST_PROPOSED_REPO_PATH=${RPM_REQUEST_PROPOSED_REPO_PATH:-$RPM_PROPOSED_REPO_PATH}
local RPM_REQUEST_OS_REPO_PATH=${RPM_REQUEST_OS_REPO_PATH:-$RPM_OS_REPO_PATH}
case true in
"$IS_HOTFIX" )
local RPM_REQUEST_REPO_PATH=$RPM_REQUEST_HOTFIX_REPO_PATH
;;
"$IS_SECURITY" )
local RPM_REQUEST_REPO_PATH=$RPM_REQUEST_SECURITY_REPO_PATH
;;
"$IS_UPDATES" )
local RPM_REQUEST_REPO_PATH=$RPM_REQUEST_PROPOSED_REPO_PATH
;;
* )
local RPM_REQUEST_REPO_PATH=$RPM_REQUEST_OS_REPO_PATH
;;
esac
EXTRAREPO="${EXTRAREPO}|repo5,http://${REMOTE_REQUEST_REPO_HOST}/${REPO_REQUEST_PATH_PREFIX}/${REQUEST}/${RPM_REQUEST_REPO_PATH}/x86_64"
fi
export EXTRAREPO
if [ -n "$EXTRAREPO" ] ; then
local EXTRAPARAMS=""
local OLDIFS="$IFS"
IFS='|'
for repo in $EXTRAREPO; do
IFS="$OLDIFS"
[ -n "$repo" ] && EXTRAPARAMS="${EXTRAPARAMS} --repository ${repo#*,}"
IFS='|'
done
IFS="$OLDIFS"
fi
local tmpdir=$(mktemp -d ${PKG_DIR}/build-XXXXXXXX)
echo "BUILD_SUCCEEDED=false" > ${WRKDIR}/buildresult.params
bash -c "${WRKDIR}/build \
--verbose \
--no-keep-chroot \
--dist ${DIST} \
--build \
--source $BUILDDIR \
--output $tmpdir \
${EXTRAPARAMS}"
local exitstatus=$(cat ${tmpdir}/exitstatus || echo 1)
[ -f "${tmpdir}/build.log" ] && mv "${tmpdir}/build.log" "${WRKDIR}/buildlog.txt"
[ -f "${tmpdir}/root.log" ] && mv "${tmpdir}/root.log" "${WRKDIR}/rootlog.txt"
fill_buildresult $exitstatus 0 $PACKAGENAME RPM
if [ "$exitstatus" == "0" ] ; then
rm -f ${WRKDIR}/buildresult.params
cat >${WRKDIR}/buildresult.params<<-EOL
BUILD_HOST=`hostname -f`
PKG_PATH=$tmpdir
GERRIT_CHANGE_STATUS=$GERRIT_CHANGE_STATUS
REQUEST_NUM=$REQUEST_NUM
LP_BUG=$LP_BUG
IS_SECURITY=$IS_SECURITY
IS_HOTFIX=$IS_HOTFIX
EXTRAREPO="$EXTRAREPO"
REPO_TYPE=rpm
DIST=$DIST
EOL
# Fill yaml file
local srpmfile=$(find ${tmpdir}/ -name *.src.rpm)
local srcpackagename=$(rpm -qp $srpmfile --queryformat %{NAME}"\n" | head -1)
local newrelease=$(rpm -qp $srpmfile --queryformat %{RELEASE}"\n" | head -1)
local yaml_report_file=${tmpdir}/${srcpackagename}.yaml
echo "Source: ${srcpackagename}" > $yaml_report_file
echo "Version: ${version}-${newrelease}" >> $yaml_report_file
echo "Binary:" >> $yaml_report_file
for binary in $(find ${tmpdir}/ -name *.rpm | egrep -v '\.src\.rpm$') ; do
local binary_name=$(rpm -qp $binary --queryformat %{NAME}"\n" | head -1)
echo " - ${binary_name}" >> $yaml_report_file
done
echo "Build_time: $(date '+%F-%H-%M-%S')" >> $yaml_report_file
echo "Code_project:" >> $yaml_report_file
echo " ${gitsrcprj}: ${gitsrcsha}" >> $yaml_report_file
[ "$IS_OPENSTACK" == "true" ] \
&& echo " ${gitspecprj}: ${gitspecsha}" >> $yaml_report_file
fi
exit $exitstatus
}
main "$@"
exit 0


@@ -1,69 +0,0 @@
CONTAINER_NAME=docker-builder-mock:latest
TYPE=mock
CACHE_DIR=/var/cache/docker-build/${TYPE}/cache
ROOT_DIR=/var/cache/docker-build/${TYPE}/root
ROOT_NAME=centos-6-x86_64
CONFIG_CONTENT="
config_opts['plugin_conf']['tmpfs_enable'] = True
config_opts['plugin_conf']['tmpfs_opts'] = {}
config_opts['plugin_conf']['tmpfs_opts']['required_ram_mb'] = 2048
config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'] = '25g'
config_opts['plugin_conf']['tmpfs_opts']['mode'] = '0755'
config_opts['plugin_conf']['tmpfs_opts']['keep_mounted'] = False
config_opts['root'] = '${ROOT_NAME}'
config_opts['target_arch'] = 'x86_64'
config_opts['legal_host_arches'] = ('x86_64',)
config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
config_opts['dist'] = 'el6' # only useful for --resultdir variable subst
config_opts['macros']['%dist'] = '.el6'
config_opts['releasever'] = '6'
config_opts['priorities.conf'] = '[main]\nenabled=1'
config_opts['yum.conf'] = \"\"\"
[main]
plugins=1
keepcache=1
debuglevel=2
reposdir=/dev/null
logfile=/var/log/yum.log
retries=20
obsoletes=1
gpgcheck=0
assumeyes=1
syslog_ident=mock
syslog_device=
# repos
[base]
name=BaseOS
baseurl=http://mirror.yandex.ru/centos/6/os/x86_64/
failovermethod=priority
gpgkey=file:///etc/pki/mock/RPM-GPG-KEY-CentOS-6
gpgcheck=1
[updates]
name=updates
enabled=1
baseurl=http://mirror.yandex.ru/centos/6/updates/x86_64/
failovermethod=priority
gpgkey=file:///etc/pki/mock/RPM-GPG-KEY-CentOS-6
gpgcheck=1
[extras]
name=extras
baseurl=http://mirror.yandex.ru/centos/6/extras/x86_64/
failovermethod=priority
gpgkey=file:///etc/pki/mock/RPM-GPG-KEY-EPEL-6
gpgcheck=1
[epel]
name=epel
baseurl=http://mirror.yandex.ru/epel/6/x86_64/
failovermethod=priority
gpgkey=file:///etc/pki/mock/RPM-GPG-KEY-EPEL-6
gpgcheck=1
\"\"\"
"

View File

@ -1,69 +0,0 @@
CONTAINER_NAME=docker-builder-mock:latest
TYPE=mock
CACHE_DIR=/var/cache/docker-build/${TYPE}/cache
ROOT_DIR=/var/cache/docker-build/${TYPE}/root
ROOT_NAME=centos-7-x86_64
CONFIG_CONTENT="
config_opts['plugin_conf']['tmpfs_enable'] = True
config_opts['plugin_conf']['tmpfs_opts'] = {}
config_opts['plugin_conf']['tmpfs_opts']['required_ram_mb'] = 2048
config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'] = '25g'
config_opts['plugin_conf']['tmpfs_opts']['mode'] = '0755'
config_opts['plugin_conf']['tmpfs_opts']['keep_mounted'] = False
config_opts['root'] = '${ROOT_NAME}'
config_opts['target_arch'] = 'x86_64'
config_opts['legal_host_arches'] = ('x86_64',)
config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
config_opts['dist'] = 'el7' # only useful for --resultdir variable subst
config_opts['macros']['%dist'] = '.el7'
config_opts['releasever'] = '7'
config_opts['priorities.conf'] = '[main]\nenabled=1'
config_opts['yum.conf'] = \"\"\"
[main]
plugins=1
keepcache=1
debuglevel=2
reposdir=/dev/null
logfile=/var/log/yum.log
retries=20
obsoletes=1
gpgcheck=0
assumeyes=1
syslog_ident=mock
syslog_device=
# repos
[base]
name=BaseOS
baseurl=http://mirror.yandex.ru/centos/7/os/x86_64/
failovermethod=priority
gpgkey=file:///etc/pki/mock/RPM-GPG-KEY-CentOS-7
gpgcheck=1
[updates]
name=updates
enabled=1
baseurl=http://mirror.yandex.ru/centos/7/updates/x86_64/
failovermethod=priority
gpgkey=file:///etc/pki/mock/RPM-GPG-KEY-CentOS-7
gpgcheck=1
[extras]
name=extras
baseurl=http://mirror.yandex.ru/centos/7/extras/x86_64/
failovermethod=priority
gpgkey=file:///etc/pki/mock/RPM-GPG-KEY-EPEL-7
gpgcheck=1
[epel]
name=epel
baseurl=http://mirror.yandex.ru/epel/7/x86_64/
failovermethod=priority
gpgkey=file:///etc/pki/mock/RPM-GPG-KEY-EPEL-7
gpgcheck=1
\"\"\"
"

View File

@ -1 +0,0 @@
#DOCKER_OPTS="--dns 8.8.8.8"

View File

@ -1,9 +0,0 @@
FROM centos:centos7
# Authors: Dmitry Burmistrov <dburmistrov@mirantis.com>
MAINTAINER Dmitry Burmistrov <dburmistrov@mirantis.com>
RUN yum -y --disableplugin=fastestmirror install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm && \
yum -y --disableplugin=fastestmirror install mock yum-plugin-priorities && \
yum clean all && \
useradd abuild -g mock

View File

@ -1,11 +0,0 @@
#!/bin/sh
. "${SETUP_DATA_DIR}/common-data"
. "${SETUP_DATA_DIR}/common-functions"
#. "$SETUP_DATA_DIR/common-config"
if [ "${STAGE}" = "setup-start" ]; then
mount -t tmpfs overlay /var/lib/schroot/union/overlay
elif [ "${STAGE}" = "setup-recover" ]; then
mount -t tmpfs overlay /var/lib/schroot/union/overlay
elif [ "${STAGE}" = "setup-stop" ]; then
umount -f /var/lib/schroot/union/overlay
fi

View File

@ -1,31 +0,0 @@
FROM ubuntu:trusty
# Authors: Dmitry Burmistrov <dburmistrov@mirantis.com>
MAINTAINER Dmitry Burmistrov <dburmistrov@mirantis.com>
ENV DEBIAN_FRONTEND noninteractive
ENV DEBCONF_NONINTERACTIVE_SEEN true
COPY ./sbuild-key.pub /var/lib/sbuild/apt-keys/sbuild-key.pub
COPY ./sbuild-key.sec /var/lib/sbuild/apt-keys/sbuild-key.sec
RUN rm -f /etc/apt/sources.list.d/proposed.list && \
apt-get update && apt-get -y install sbuild debhelper && \
apt-get clean && \
mkdir -p /srv/build && \
sed -i '/^1/d' /etc/sbuild/sbuild.conf && \
echo "\$build_arch_all = 1;" >> /etc/sbuild/sbuild.conf && \
echo "\$log_colour = 0;" >> /etc/sbuild/sbuild.conf && \
echo "\$apt_allow_unauthenticated = 1;" >> /etc/sbuild/sbuild.conf && \
echo "\$apt_update = 0;" >> /etc/sbuild/sbuild.conf && \
echo "\$apt_clean = 0;" >> /etc/sbuild/sbuild.conf && \
echo "\$build_source = 1;" >> /etc/sbuild/sbuild.conf && \
echo "\$build_dir = '/srv/build';" >> /etc/sbuild/sbuild.conf && \
echo "\$log_dir = '/srv/build';" >> /etc/sbuild/sbuild.conf && \
echo "\$stats_dir = '/srv/build';" >> /etc/sbuild/sbuild.conf && \
echo "\$verbose = 100;" >> /etc/sbuild/sbuild.conf && \
echo "\$mailprog = '/bin/true';" >> /etc/sbuild/sbuild.conf && \
echo "\$purge_build_deps = 'never';" >> /etc/sbuild/sbuild.conf && \
echo "1;" >> /etc/sbuild/sbuild.conf
COPY ./04tmpfs /etc/schroot/setup.d/04tmpfs
RUN chmod +x /etc/schroot/setup.d/04tmpfs

View File

@ -1,11 +0,0 @@
DIST_NAME=jessie
CONTAINER_NAME=docker-builder-sbuild:latest
TYPE=sbuild
CACHE_DIR=/var/cache/docker-build/${TYPE}/cache
ROOT_DIR=/var/cache/docker-build/${TYPE}/root
ROOT_NAME=${DIST_NAME}-amd64
APT_SOURCES_CONTENT="
deb http://httpredir.debian.org/debian ${DIST_NAME} main
deb http://httpredir.debian.org/debian ${DIST_NAME}-updates main
"

View File

@ -1,11 +0,0 @@
DIST_NAME=trusty
CONTAINER_NAME=docker-builder-sbuild:latest
TYPE=sbuild
CACHE_DIR=/var/cache/docker-build/${TYPE}/cache
ROOT_DIR=/var/cache/docker-build/${TYPE}/root
ROOT_NAME=${DIST_NAME}-amd64
APT_SOURCES_CONTENT="
deb http://archive.ubuntu.com/ubuntu $DIST_NAME main universe multiverse restricted
deb http://archive.ubuntu.com/ubuntu ${DIST_NAME}-updates main universe multiverse restricted
"

View File

@ -1,11 +0,0 @@
DIST_NAME=xenial
CONTAINER_NAME=docker-builder-sbuild:latest
TYPE=sbuild
CACHE_DIR=/var/cache/docker-build/${TYPE}/cache
ROOT_DIR=/var/cache/docker-build/${TYPE}/root
ROOT_NAME=${DIST_NAME}-amd64
APT_SOURCES_CONTENT="
deb http://archive.ubuntu.com/ubuntu $DIST_NAME main universe multiverse restricted
deb http://archive.ubuntu.com/ubuntu ${DIST_NAME}-updates main universe multiverse restricted
"

View File

@ -1,80 +0,0 @@
#!/usr/bin/env python
##
# Convert pip style alpha/beta/rc/dev versions to the ones suitable for a
# package manager.
# Does not modify the conventional 3-digit version numbers.
# Examples:
# 1.2.3.0a4 -> 1.2.3~a4
# 1.2.3rc1 -> 1.2.3~rc1
# 1.2.3 -> 1.2.3
import argparse
from pkg_resources import parse_version
import re
def strip_leading_zeros(s):
return re.sub(r"^0+([0-9]+)", r"\1", s)
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'-t', '--tag', dest='tag', action='store', type=str,
help='PyPi version tag', required=True, default='0'
)
parser.add_argument(
'-l', '--version-length', type=int, default=3,
help="Number of version components"
)
params, other_params = parser.parse_known_args()
print(convert_version(params.tag, params.version_length))
def convert_version(pip_ver, version_length):
# drop the dashed part of the version string because
# it represents a patch level of the given version
pip_ver = pip_ver.split('-')[0]
# add a leading 1 if the tag starts with a letter
if re.match(r"^[a-zA-Z]", pip_ver):
pip_ver = '1' + pip_ver
# parse_version converts string '12.0.0.0rc1'
# to a tuple ('00000012', '*c', '00000001', '*final')
# details:
# http://galaxy-dist.readthedocs.org/en/latest/lib/pkg_resources.html
pip_ver_parts = parse_version(pip_ver)
_ver = True
pkg_ver_part = []
pkg_alpha = ""
pkg_rev_part = []
for part in pip_ver_parts:
if part == "*final":
continue
if re.match(r"[*a-z]", part):
_ver = False
pkg_alpha = re.sub(r"^\*", "~", part)
continue
if _ver:
pkg_ver_part.append(strip_leading_zeros(part))
else:
pkg_rev_part.append(strip_leading_zeros(part))
# replace 'c' and '@' with 'rc' and 'dev' at pkg_alpha
pkg_alpha = pkg_alpha.replace('c', 'rc')
pkg_alpha = pkg_alpha.replace('@', 'dev')
# pad the version with zeros up to version_length components
while (len(pkg_ver_part) < version_length):
pkg_ver_part.append('0')
return '.'.join(pkg_ver_part) + pkg_alpha + '.'.join(pkg_rev_part)
if __name__ == "__main__":
main()
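A minimal usage sketch (not part of the original file; the script name is hypothetical and the outputs assume the legacy pkg_resources tuple behaviour described in the comments above):

    python convert-version.py --tag 1.2.3.0a4      # -> 1.2.3~a4
    python convert-version.py --tag 12.0.0.0rc1    # -> 12.0.0~rc1
    python convert-version.py --tag 1.2.3          # -> 1.2.3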

View File

@ -1,42 +0,0 @@
#!/bin/bash -ex
. $(dirname $(readlink -f $0))/config
CONTAINERNAME=sbuild:latest
CACHEPATH=/var/cache/docker-builder/sbuild
[ -z "$DIST" ] && DIST=trusty
if [ -n "$EXTRAREPO" ] ; then
EXTRACMD=""
OLDIFS="$IFS"
IFS='|'
for repo in $EXTRAREPO; do
IFS="$OLDIFS"
EXTRACMD="${EXTRACMD} --chroot-setup-commands=\"apt-add-repo deb $repo\" "
IFS='|'
done
IFS="$OLDIFS"
fi
dscfile=$(find . -maxdepth 1 -name \*.dsc | head -1)
debianfolder=$(find . -wholename "*debian/changelog*" | head -1 | sed 's|^./||; s|debian/changelog||')
if [ -n "$dscfile" ]; then
SOURCEDEST=$dscfile
SOURCEDEST=`basename $SOURCEDEST`
elif [ -n "$debianfolder" ] ; then
SOURCEDEST=$debianfolder
fi
docker run ${DNSPARAM} --privileged --rm -v ${CACHEPATH}:/srv/images:ro \
-v $(pwd):/srv/source ${CONTAINERNAME} \
bash -c "( sed -i '/debian\/rules/d' /usr/bin/sbuild
DEB_BUILD_OPTIONS=nocheck /usr/bin/sbuild -d ${DIST} --nolog \
--source --force-orig-source \
$EXTRACMD \
--chroot-setup-commands=\"apt-get update\" \
--chroot-setup-commands=\"apt-get upgrade -f -y --force-yes\" \
/srv/source/${SOURCEDEST} 2>&1
echo \$? > /srv/build/exitstatus.sbuild ) \
| tee /srv/build/buildlog.sbuild
rm -rf /srv/source/buildresult
mv /srv/build /srv/source/buildresult
chown -R `id -u`:`id -g` /srv/source"

View File

@ -1,49 +0,0 @@
#!/bin/bash -ex
. $(dirname $(readlink -f $0))/config
CONTAINERNAME=mockbuild:latest
CACHEPATH=/var/cache/docker-builder/mock
DIST_VERSION=`echo $DIST | sed 's|centos||'`
[ -z "${DIST_VERSION}" ] && DIST_VERSION=7
EXTRACMD=":"
if [ -n "$EXTRAREPO" ] ; then
EXTRACMD="sed -i"
OLDIFS="$IFS"
IFS='|'
for repo in $EXTRAREPO ; do
IFS="$OLDIFS"
reponame=${repo%%,*}
repourl=${repo##*,}
EXTRACMD="$EXTRACMD -e \"/^\[base\]/i[${reponame}]\nname=${reponame}\nbaseurl=${repourl}\ngpgcheck=0\nenabled=1\nskip_if_unavailable=1\""
IFS='|'
done
IFS="$OLDIFS"
EXTRACMD="$EXTRACMD /etc/mock/centos-${DIST_VERSION}-x86_64.cfg"
fi
docker run ${DNSPARAM} --privileged --rm -v ${CACHEPATH}:/srv/mock:ro \
-v $(pwd):/home/abuild/rpmbuild ${CONTAINERNAME} \
bash -x -c "mkdir -p /srv/tmpfs/cache
mount -t tmpfs overlay /srv/tmpfs/cache
mount -t aufs -o br=/srv/tmpfs/cache/:/srv/mock/cache none /var/cache/mock/
mkdir -p /var/cache/mock/configs
cp /etc/mock/logging.ini /var/cache/mock/configs/
rm -rf /etc/mock
ln -s /var/cache/mock/configs /etc/mock
$EXTRACMD
echo 'Current config file:'
cat /etc/mock/centos-${DIST_VERSION}-x86_64.cfg
su - abuild -c 'mock -r centos-${DIST_VERSION}-x86_64 --verbose --update --old-chroot'
chown -R abuild.mock /home/abuild
[[ \$(ls /home/abuild/rpmbuild/*.src.rpm | wc -l) -eq 0 ]] \
&& su - abuild -c 'mock -r centos-${DIST_VERSION}-x86_64 --no-clean --no-cleanup-after --buildsrpm --verbose \
--sources=/home/abuild/rpmbuild --resultdir=/home/abuild/rpmbuild --buildsrpm \
--spec=\$(ls /home/abuild/rpmbuild/*.spec) --old-chroot'
rm -rf /home/abuild/rpmbuild/build
su - abuild -c 'mock -r centos-${DIST_VERSION}-x86_64 --no-clean --no-cleanup-after --verbose \
--resultdir=/home/abuild/rpmbuild/build \$(ls /home/abuild/rpmbuild/*.src.rpm) --old-chroot'
echo \$? > /home/abuild/rpmbuild/build/exitstatus.mock
umount -f /var/cache/mock /srv/tmpfs/cache
rm -rf /srv/tmpfs
rm -f /home/abuild/rpmbuild/\*.src.rpm /home/abuild/rpmbuild/{build,root,state}.log
chown -R `id -u`:`id -g` /home/abuild"

View File

@ -1 +0,0 @@
DNSPARAM="--dns 172.18.80.136"

View File

@ -1,36 +0,0 @@
#!/bin/bash
#
# Prepare the chroot environment (it must exist before any builds are started)
# with `sbuild-createchroot`, which sets up everything needed for building DEBs
#
# Usage: DIST=trusty ./create-deb-chroot.sh # for Trusty
# DIST=precise ./create-deb-chroot.sh # for Precise
# UPSTREAM_MIRROR=http://ua.archive.ubuntu.com/ubuntu/ ./create-deb-chroot.sh
set -ex
BIN="${0%/*}"
source "${BIN}/config"
CONTAINERNAME=sbuild:latest
CACHEPATH=/var/cache/docker-builder/sbuild
# define upstream Ubuntu mirror
MIRROR=${UPSTREAM_MIRROR:-http://mirror.yandex.ru/ubuntu}
# Use trusty distro by default
[ -z "${DIST}" ] && DIST=trusty
if [ "${DIST}" != "precise" ] && [ "${DIST}" != "trusty" ]; then
echo "Unknown dist version: ${DIST}"
exit 1
fi
docker run ${DNSPARAM} --privileged --rm -v ${CACHEPATH}:/srv/images ${CONTAINERNAME} \
bash -c "rm -f /etc/schroot/chroot.d/*
sbuild-createchroot ${DIST} /srv/images/${DIST}-amd64 ${MIRROR}
echo deb ${MIRROR} ${DIST} main universe multiverse restricted > /srv/images/${DIST}-amd64/etc/apt/sources.list
echo deb ${MIRROR} ${DIST}-updates main universe multiverse restricted >> /srv/images/${DIST}-amd64/etc/apt/sources.list
sbuild-update -udcar ${DIST}
echo '#!/bin/bash' > /srv/images/${DIST}-amd64/usr/bin/apt-add-repo
echo 'echo \$* >> /etc/apt/sources.list' >> /srv/images/${DIST}-amd64/usr/bin/apt-add-repo
chmod +x /srv/images/${DIST}-amd64/usr/bin/apt-add-repo"

View File

@ -1,43 +0,0 @@
#!/bin/bash
#
# Prepare the chroot environment (it must exist before any builds are started)
# with `mock --init`, which installs all packages (@buildsys-build)
# required for building RPMs
#
# Usage: DIST=6 ./create-rpm-chroot.sh # for CentOS 6
# DIST=7 ./create-rpm-chroot.sh # for CentOS 7
set -ex
BIN="${0%/*}"
source "${BIN}/config"
CONTAINERNAME=mockbuild:latest
CACHEPATH=/var/cache/docker-builder/mock
# handle DIST=centos6, which can be passed from an upstream job or defined in the environment
DIST_VERSION=${DIST/centos/}
# by default we init env for CentOS 7
[ -z "${DIST_VERSION}" ] && DIST_VERSION=7
if [ ! -f "${BIN}/mockbuild/centos${DIST_VERSION}.conf" ] ; then
echo "Unknown dist version: ${DIST_VERSION}"
exit 1
fi
source ${BIN}/mockbuild/centos${DIST_VERSION}.conf
CONFIG_CONTENT_BASE64=$(echo "${CONFIG_CONTENT}" | base64 -w0)
docker run ${DNSPARAM} --privileged --rm -v ${CACHEPATH}/cache:/var/cache/mock ${CONTAINERNAME} \
bash -c "mkdir -p /var/cache/mock/configs
cp /etc/mock/logging.ini /var/cache/mock/configs/
rm -rf /etc/mock
ln -s /var/cache/mock/configs /etc/mock
rm -rf /var/cache/mock/epel-${DIST_VERSION}-x86_64
rm -f /etc/mock/centos-${DIST_VERSION}-x86_64.cfg
echo \"${CONFIG_CONTENT_BASE64}\" \
| base64 -d > /etc/mock/centos-${DIST_VERSION}-x86_64.cfg
echo 'Current config file:'
cat /etc/mock/centos-${DIST_VERSION}-x86_64.cfg
chown -R abuild:mock /var/cache/mock
chmod g+s /var/cache/mock
su - abuild -c 'mock -r centos-${DIST_VERSION}-x86_64 -v --init --old-chroot'"

View File

@ -1,14 +0,0 @@
FROM centos:centos7
# Authors: Dmitry Burmistrov <dburmistrov@mirantis.com>
# Igor Gnatenko <ignatenko@mirantis.com>
MAINTAINER Igor Gnatenko <ignatenko@mirantis.com>
RUN yum -y --disableplugin=fastestmirror install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm && \
yum -y --disableplugin=fastestmirror install --enablerepo=epel-testing mock && \
yum clean --enablerepo=epel-testing all && \
useradd abuild -g mock
COPY mock_configure.sh /
RUN /mock_configure.sh; \
rm -f /mock_configure.sh

View File

@ -1,60 +0,0 @@
CONFIG_CONTENT="
config_opts['plugin_conf']['tmpfs_enable'] = True
config_opts['plugin_conf']['tmpfs_opts'] = {}
config_opts['plugin_conf']['tmpfs_opts']['required_ram_mb'] = 2048
config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'] = '25g'
config_opts['plugin_conf']['tmpfs_opts']['mode'] = '0755'
config_opts['plugin_conf']['tmpfs_opts']['keep_mounted'] = False
config_opts['root'] = 'epel-7-x86_64'
config_opts['target_arch'] = 'x86_64'
config_opts['legal_host_arches'] = ('x86_64',)
config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
config_opts['dist'] = 'el7' # only useful for --resultdir variable subst
config_opts['macros']['%dist'] = '.el7'
config_opts['releasever'] = '7'
config_opts['yum.conf'] = \"\"\"
[main]
keepcache=1
debuglevel=2
reposdir=/dev/null
logfile=/var/log/yum.log
retries=20
obsoletes=1
gpgcheck=0
assumeyes=1
syslog_ident=mock
syslog_device=
# repos
[base]
name=BaseOS
baseurl=http://vault.centos.org/7.2.1511/os/x86_64/
failovermethod=priority
gpgkey=file:///usr/share/distribution-gpg-keys/centos/RPM-GPG-KEY-CentOS-7
gpgcheck=1
[updates]
name=updates
enabled=1
baseurl=http://vault.centos.org/7.2.1511/updates/x86_64/
failovermethod=priority
gpgkey=file:///usr/share/distribution-gpg-keys/centos/RPM-GPG-KEY-CentOS-7
gpgcheck=1
[extras]
name=extras
baseurl=http://vault.centos.org/7.2.1511/extras/x86_64/
failovermethod=priority
gpgkey=file:///usr/share/distribution-gpg-keys/centos/RPM-GPG-KEY-CentOS-7
gpgcheck=1
[epel]
name=epel
baseurl=http://mirror.yandex.ru/epel/7/x86_64/
failovermethod=priority
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-7
gpgcheck=1
\"\"\"
"

View File

@ -1,60 +0,0 @@
CONFIG_CONTENT="
config_opts['plugin_conf']['tmpfs_enable'] = True
config_opts['plugin_conf']['tmpfs_opts'] = {}
config_opts['plugin_conf']['tmpfs_opts']['required_ram_mb'] = 2048
config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'] = '25g'
config_opts['plugin_conf']['tmpfs_opts']['mode'] = '0755'
config_opts['plugin_conf']['tmpfs_opts']['keep_mounted'] = False
config_opts['root'] = 'epel-71-x86_64'
config_opts['target_arch'] = 'x86_64'
config_opts['legal_host_arches'] = ('x86_64',)
config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
config_opts['dist'] = 'el7' # only useful for --resultdir variable subst
config_opts['macros']['%dist'] = '.el7'
config_opts['releasever'] = '7'
config_opts['yum.conf'] = \"\"\"
[main]
keepcache=1
debuglevel=2
reposdir=/dev/null
logfile=/var/log/yum.log
retries=20
obsoletes=1
gpgcheck=0
assumeyes=1
syslog_ident=mock
syslog_device=
# repos
[base]
name=BaseOS
baseurl=http://vault.centos.org/7.1.1503/os/x86_64/
failovermethod=priority
gpgkey=file:///usr/share/distribution-gpg-keys/centos/RPM-GPG-KEY-CentOS-7
gpgcheck=1
[updates]
name=updates
enabled=1
baseurl=http://vault.centos.org/7.1.1503/updates/x86_64/
failovermethod=priority
gpgkey=file:///usr/share/distribution-gpg-keys/centos/RPM-GPG-KEY-CentOS-7
gpgcheck=1
[extras]
name=extras
baseurl=http://vault.centos.org/7.1.1503/extras/x86_64/
failovermethod=priority
gpgkey=file:///usr/share/distribution-gpg-keys/centos/RPM-GPG-KEY-CentOS-7
gpgcheck=1
[epel]
name=epel
baseurl=http://mirror.yandex.ru/epel/7/x86_64/
failovermethod=priority
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-7
gpgcheck=1
\"\"\"
"

View File

@ -1,24 +0,0 @@
#!/bin/bash
# Generate mock configuration files:
# /etc/mock/centos-7-x86_64.cfg
# /etc/mock/centos-6-x86_64.cfg
# (one for el7 and one for el6), each with the extra configuration param:
# config_opts['macros']['%dist'] = '.${DIST}${DISTSUFFIX}'
set -e
for cfg in /etc/mock/epel-{6,7}-x86_64.cfg; do
DIST=$(awk -F"'" "/config_opts\['dist'\]/ {print \$4}" "${cfg}")
sed -e "/config_opts\['dist'\]/s/$/\nconfig_opts['macros']['%dist'] = '.${DIST}${DISTSUFFIX}'/" $cfg \
>${cfg/epel/centos}
done
# Enable tmpfs mock plugin
cat > /etc/mock/site-defaults.cfg <<HEREDOC
config_opts['plugin_conf']['tmpfs_enable'] = True
config_opts['plugin_conf']['tmpfs_opts'] = {}
config_opts['plugin_conf']['tmpfs_opts']['required_ram_mb'] = 2048
config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'] = '25g'
config_opts['plugin_conf']['tmpfs_opts']['mode'] = '0755'
config_opts['plugin_conf']['tmpfs_opts']['keep_mounted'] = False
HEREDOC
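A worked illustration of the sed step above (the DISTSUFFIX value is hypothetical): for the el7 input config, the generated /etc/mock/centos-7-x86_64.cfg gains one extra line right after config_opts['dist']:

    config_opts['macros']['%dist'] = '.el7'       # with DISTSUFFIX unset
    config_opts['macros']['%dist'] = '.el7mos'    # with, for example, DISTSUFFIX=mos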

View File

@ -1,11 +0,0 @@
#!/bin/sh
. "${SETUP_DATA_DIR}/common-data"
. "${SETUP_DATA_DIR}/common-functions"
#. "$SETUP_DATA_DIR/common-config"
if [ "${STAGE}" = "setup-start" ]; then
mount -t tmpfs overlay /var/lib/schroot/union/overlay
elif [ "${STAGE}" = "setup-recover" ]; then
mount -t tmpfs overlay /var/lib/schroot/union/overlay
elif [ "${STAGE}" = "setup-stop" ]; then
umount -f /var/lib/schroot/union/overlay
fi

View File

@ -1,41 +0,0 @@
FROM ubuntu:trusty
MAINTAINER dburmistrov@mirantis.com
ENV MIRROR http://mirror.yandex.ru/ubuntu
ENV NAMESERV 172.18.80.136
ENV DIST trusty
ENV DEBIAN_FRONTEND noninteractive
ENV DEBCONF_NONINTERACTIVE_SEEN true
VOLUME ["/srv/images", "/srv/source"]
COPY sbuild-key.pub /var/lib/sbuild/apt-keys/sbuild-key.pub
COPY sbuild-key.sec /var/lib/sbuild/apt-keys/sbuild-key.sec
RUN rm -f /etc/apt/sources.list.d/proposed.list && \
echo -e "\nnameserver $NAMESERV\n" >> /etc/resolv.conf && \
echo "deb $MIRROR $DIST main universe multiverse restricted" > /etc/apt/sources.list && \
echo "deb $MIRROR $DIST-updates main universe multiverse restricted" >> /etc/apt/sources.list && \
apt-get update && apt-get -y install sbuild debhelper && \
apt-get clean && \
mkdir -p /srv/build && \
sed -i '/^1/d' /etc/sbuild/sbuild.conf && \
echo "\$build_arch_all = 1;" >> /etc/sbuild/sbuild.conf && \
echo "\$log_colour = 0;" >> /etc/sbuild/sbuild.conf && \
echo "\$apt_allow_unauthenticated = 1;" >> /etc/sbuild/sbuild.conf && \
echo "\$apt_update = 0;" >> /etc/sbuild/sbuild.conf && \
echo "\$apt_clean = 0;" >> /etc/sbuild/sbuild.conf && \
echo "\$build_source = 1;" >> /etc/sbuild/sbuild.conf && \
echo "\$build_dir = '/srv/build';" >> /etc/sbuild/sbuild.conf && \
echo "\$log_dir = '/srv/build';" >> /etc/sbuild/sbuild.conf && \
echo "\$stats_dir = '/srv/build';" >> /etc/sbuild/sbuild.conf && \
echo "\$verbose = 100;" >> /etc/sbuild/sbuild.conf && \
echo "\$mailprog = '/bin/true';" >> /etc/sbuild/sbuild.conf && \
echo "\$purge_build_deps = 'never';" >> /etc/sbuild/sbuild.conf && \
echo "1;" >> /etc/sbuild/sbuild.conf
COPY ./04tmpfs /etc/schroot/setup.d/04tmpfs
RUN chmod +x /etc/schroot/setup.d/04tmpfs
COPY ./precise-amd64-sbuild /etc/schroot/chroot.d/precise-amd64-sbuild
COPY ./trusty-amd64-sbuild /etc/schroot/chroot.d/trusty-amd64-sbuild

View File

@ -1,8 +0,0 @@
[precise-amd64-sbuild]
type=directory
description=Ubuntu precise/amd64 build environment
directory=/srv/images/precise-amd64
groups=root,sbuild
root-groups=root,sbuild
profile=sbuild
union-type=aufs

View File

@ -1,8 +0,0 @@
[trusty-amd64-sbuild]
type=directory
description=Ubuntu trusty/amd64 build environment
directory=/srv/images/trusty-amd64
groups=root,sbuild
root-groups=root,sbuild
profile=sbuild
union-type=aufs

View File

@ -1,21 +0,0 @@
#!/bin/bash
set -ex
BIN="${0%/*}"
source "${BIN}/config"
CONTAINERNAME=sbuild:latest
CACHEPATH=/var/cache/docker-builder/sbuild
# Use trusty distro by default
[ -z "${DIST}" ] && DIST=trusty
if [ "${DIST}" != "precise" ] && [ "${DIST}" != "trusty" ]; then
echo "Unknown dist version: ${DIST}"
exit 1
fi
docker run ${DNSPARAM} --privileged --rm -v ${CACHEPATH}:/srv/images ${CONTAINERNAME} \
bash -c "sbuild-update -udcar ${DIST}"

View File

@ -1,36 +0,0 @@
#!/bin/bash
set -ex
BIN="${0%/*}"
source "${BIN}/config"
CONTAINERNAME=mockbuild:latest
CACHEPATH=/var/cache/docker-builder/mock
# handle DIST=centos6, which can be passed from an upstream job or defined in the environment
DIST_VERSION=${DIST/centos/}
# by default we init env for CentOS 7
[ -z "${DIST_VERSION}" ] && DIST_VERSION=7
if [ ! -f "${BIN}/mockbuild/centos${DIST_VERSION}.conf" ] ; then
echo "Unknown dist version: ${DIST_VERSION}"
exit 1
fi
source ${BIN}/mockbuild/centos${DIST_VERSION}.conf
CONFIG_CONTENT_BASE64=$(echo "${CONFIG_CONTENT}" | base64 -w0)
docker run ${DNSPARAM} --privileged --rm -v ${CACHEPATH}/cache:/var/cache/mock ${CONTAINERNAME} \
bash -c "
mkdir -p /var/cache/mock/configs
cp /etc/mock/logging.ini /var/cache/mock/configs/
rm -rf /etc/mock
ln -s /var/cache/mock/configs /etc/mock
rm -rf /var/cache/mock/epel-${DIST_VERSION}-x86_64/yum_cache
rm -f /etc/mock/centos-${DIST_VERSION}-x86_64.cfg
echo \"${CONFIG_CONTENT_BASE64}\" \
| base64 -d > /etc/mock/centos-${DIST_VERSION}-x86_64.cfg
echo 'Current config file:'
cat /etc/mock/centos-${DIST_VERSION}-x86_64.cfg
su - abuild -c 'mock -r centos-${DIST_VERSION}-x86_64 -v --update --old-chroot'"

View File

@ -1,14 +0,0 @@
openstack/fuel-agent
openstack/fuel-astute
openstack/fuel-library
openstack/fuel-main
openstack/fuel-menu
openstack/fuel-nailgun-agent
openstack/fuel-nailgun-extension-cluster-upgrade
openstack/fuel-octane
openstack/fuel-ostf
openstack/fuel-ui
openstack/fuel-web
openstack/network-checker
openstack/python-fuelclient
openstack/shotgun

View File

@ -1,44 +0,0 @@
#!/bin/bash
#
# needed functions
#
cleanup_and_exit()
{
trap EXIT
exit ${1:-0}
}
fail_exit()
{
echo "$@"
cleanup_and_exit 1
}
job_lock() {
[ -z "$1" ] && fail_exit "Lock file is not specified"
local LOCKFILE=$1
shift
local fd=1000
eval "exec $fd>>$LOCKFILE"
case $1 in
"set")
flock -x -n $fd \
|| fail_exit "Process already running. Lockfile: $LOCKFILE"
;;
"unset")
flock -u $fd
rm -f $LOCKFILE
;;
"wait")
local TIMEOUT=${2:-3600}
[ "${VERBOSE}" == "true" ] \
&& echo "Waiting of concurrent process (lockfile: $LOCKFILE, timeout = $TIMEOUT seconds) ..."
flock -x -w $TIMEOUT $fd \
|| fail_exit "Timeout error (lockfile: $LOCKFILE)"
;;
esac
}
trap fail_exit EXIT
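A hedged usage sketch of job_lock (the lock file path is illustrative; the wait/unset pattern matches how the publisher scripts below use it):

    job_lock /var/lock/myrepo.lock wait 3600    # block for up to an hour
    # ... critical section that updates the repository ...
    job_lock /var/lock/myrepo.lock unset        # release and remove the lock file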

View File

@ -1,43 +0,0 @@
#!/bin/bash
#
# needed functions
#
cleanup_mounts()
{
trap EXIT
local ROOT_NAME=%ROOT_NAME%
local TYPE=%TYPE%
local UNAME=%UNAME%
local GNAME=%GNAME%
case ${TYPE} in
mock)
umount -n /srv/tmpfs/cache 2> /dev/null || true
umount -n /var/cache/mock 2> /dev/null || true
umount -n /var/lib/mock/${ROOT_NAME}/root/tmp/ccache 2> /dev/null || true
umount -n /var/lib/mock/${ROOT_NAME}/root/var/cache/yum/ 2> /dev/null || true
umount -n -l /var/lib/mock/${ROOT_NAME}/root/dev/pts 2> /dev/null || true
umount -n -l /var/lib/mock/${ROOT_NAME}/root/dev/shm 2> /dev/null || true
umount -n -l /var/lib/mock/${ROOT_NAME}/root/sys 2> /dev/null || true
umount -n -l /var/lib/mock/${ROOT_NAME}/root/proc 2> /dev/null || true
rm -f /home/abuild/rpmbuild/{build,root,state}.log
chown -R ${UNAME}:${GNAME} /home/abuild
;;
sbuild)
umount -n -l /srv/root/dev/pts 2> /dev/null || true
umount -n -l /srv/root/dev/shm 2> /dev/null || true
umount -n -l /srv/root/sys 2> /dev/null || true
umount -n -l /srv/root/proc 2> /dev/null || true
umount -n -l /srv/root 2> /dev/null || true
schroot -e --all-sessions 2> /dev/null || true
chown -R ${UNAME}:${GNAME} /srv/source /srv/build 2> /dev/null || true
;;
*)
;;
esac
exit 0
}
trap cleanup_mounts EXIT

View File

@ -1,346 +0,0 @@
#!/bin/bash
docker_init_mock() {
local CONFIG_CONTENT_BASE64=$(echo "${CONFIG_CONTENT}" | base64 -w0)
docker run ${DOCKER_OPTS} --privileged --rm \
-v ${CACHE_DIR}:/var/cache/mock ${CONTAINER_NAME} \
bash ${BASH_OPTS} -c "
mkdir -p /var/cache/mock/configs
[ ! -f /var/cache/mock/configs/logging.ini ] \
&& cp /etc/mock/logging.ini /var/cache/mock/configs/
rm -rf /etc/mock
ln -s /var/cache/mock/configs /etc/mock
conffile=/etc/mock/${DIST}.cfg
tmpconffile=/etc/mock/tmp.${DIST}.cfg
rootpath=/var/cache/mock/$ROOT_NAME
tmprootpath=/var/cache/mock/tmp.$ROOT_NAME
[ -f \$conffile ] && mv \$conffile \$tmpconffile
[ -d \$rootpath ] && mv \$rootpath \$tmprootpath
echo \"${CONFIG_CONTENT_BASE64}\" \
| base64 -d > \$conffile
echo 'Current config file:'
cat \$conffile
chown -R abuild:mock /var/cache/mock
chmod g+s /var/cache/mock
if su - abuild -c \
'mock -r ${DIST} ${MOCK_OPTS} --init'
then
rm -rf \$tmprootpath \$tmpconffile
else
rm -rf \$conffile \$rootpath
[ -d \$tmprootpath ] && mv \$tmprootpath \$rootpath
[ -f \$tmpconffile ] && mv \$tmpconffile \$conffile
fi"
}
docker_init_sbuild() {
local MIRROR=$(echo "${APT_SOURCES_CONTENT}" | fgrep deb | head -1 | awk '{print $2}')
local APT_SOURCES_CONTENT_BASE64=$(echo "$APT_SOURCES_CONTENT" | base64 -w0)
docker run ${DOCKER_OPTS} --privileged --rm \
-v ${CACHE_DIR}:/srv/images ${CONTAINER_NAME} \
bash ${BASH_OPTS} -c "
mkdir -p /srv/images/chroot.d
rm -rf /etc/schroot/chroot.d
ln -s /srv/images/chroot.d /etc/schroot/chroot.d
conffile=/etc/schroot/chroot.d/$ROOT_NAME
tmpconffile=/tmp/tmp.$ROOT_NAME
rootpath=/srv/images/$ROOT_NAME
tmprootpath=/srv/images/tmp.$ROOT_NAME
[ -f \$conffile ] && mv \$conffile \$tmpconffile
[ -d \$rootpath ] && mv \$rootpath \$tmprootpath
if sbuild-createchroot ${DIST} \$rootpath ${MIRROR}
then
mv \${conffile}* \$conffile
echo 'union-type=aufs' >> \$conffile
echo $APT_SOURCES_CONTENT_BASE64 | base64 -d \
> \${rootpath}/etc/apt/sources.list
echo '#!/bin/bash' > \${rootpath}/usr/bin/apt-add-repo
echo 'echo \$* >> /etc/apt/sources.list' >> \${rootpath}/usr/bin/apt-add-repo
chmod +x \${rootpath}/usr/bin/apt-add-repo
echo '#!/bin/bash' > \${rootpath}/usr/bin/set-apt-prefs
echo 'echo \$* | base64 -d > /etc/apt/preferences' >> \${rootpath}/usr/bin/set-apt-prefs
chmod +x \${rootpath}/usr/bin/set-apt-prefs
if sbuild-update -udcar ${DIST}
then
rm -rf \$tmprootpath \$tmpconffile
else
rm -rf \$conffile \$rootpath
[ -d \$tmprootpath ] && mv \$tmprootpath \$rootpath
[ -f \$tmpconffile ] && mv \$tmpconffile \$conffile
fi
else
rm -rf \$conffile \$rootpath
[ -d \$tmprootpath ] && mv \$tmprootpath \$rootpath
[ -f \$tmpconffile ] && mv \$tmpconffile \$conffile
fi"
}
docker_update_mock() {
docker run ${DOCKER_OPTS} --privileged --rm \
-v ${CACHE_DIR}:/var/cache/mock ${CONTAINER_NAME} \
bash ${BASH_OPTS} -c "
rm -rf /etc/mock
mkdir -p /var/cache/mock/configs
ln -s /var/cache/mock/configs /etc/mock
conffile=/etc/mock/${DIST}.cfg
tmpconffile=/etc/mock/tmp.${DIST}.cfg
rootpath=/var/cache/mock/$ROOT_NAME
tmprootpath=/var/cache/mock/tmp.$ROOT_NAME
cp -Rl \$rootpath \$tmprootpath
cp \$conffile \$tmpconffile
rm -rf \${rootpath}/yum_cache
touch \$conffile -r \${rootpath}/root_cache/cache.tar.gz
echo 'Current config file:'
cat \$conffile
chown -R abuild:mock \$rootpath
chmod g+s \$rootpath
if su - abuild -c \
'mock -r ${DIST} ${MOCK_OPTS} --update'
then
rm -rf \$tmprootpath \$tmpconffile
else
rm -rf \$rootpath \$conffile
mv \$tmpconffile \$conffile
mv \$tmprootpath \$rootpath
fi"
}
docker_update_sbuild() {
local MIRROR=$(echo "${APT_SOURCES_CONTENT}" | fgrep deb | head -1 | awk '{print $2}')
local APT_SOURCES_CONTENT_BASE64=$(echo "${APT_SOURCES_CONTENT}" | base64 -w0)
docker run ${DOCKER_OPTS} --privileged --rm \
-v ${CACHE_DIR}:/srv/images ${CONTAINER_NAME} \
bash ${BASH_OPTS} -c "
mkdir -p /srv/images/chroot.d
rm -rf /etc/schroot/chroot.d
ln -s /srv/images/chroot.d /etc/schroot/chroot.d
rootpath=/srv/images/$ROOT_NAME
tmprootpath=/srv/images/tmp.$ROOT_NAME
cp -Rl \$rootpath \$tmprootpath
echo ${APT_SOURCES_CONTENT_BASE64} | base64 -d \
> \${rootpath}/etc/apt/sources.list
if sbuild-update -udcar ${DIST}
then
rm -rf \$tmprootpath
else
rm -rf \$rootpath
mv \$tmprootpath \$rootpath
fi"
}
docker_build_mock() {
[ $(ls -1 ${SOURCE_PATH}/*.{src.rpm,spec} 2>/dev/null | wc -l) -eq 0 ] \
&& fail_exit "ERROR: No RPM sources found at ${SOURCE_PATH}"
## Parse additional repositories
for repo in ${repos[@]} ; do
local reponame=$(mktemp -u XXXXXXXX)
local repourl=${repo%%,*}
local priority=${repo##*,}
if [ "$priority" == "$repourl" ] ; then
unset priority
else
priority="priority=$priority\n"
fi
local ADD_REPO_CMD="$ADD_REPO_CMD -e \"/^\[base\]/i[${reponame}]\nname=${reponame}\nbaseurl=${repourl}\ngpgcheck=0\nenabled=1\nskip_if_unavailable=1\n${priority}\""
done
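# For illustration only (URLs and priority are hypothetical): each element of
# the "repos" array is "<baseurl>[,<priority>]", e.g.
#   repos=( "http://repo.example.org/os/x86_64,1050" "http://repo.example.org/extra/x86_64" )
# The first entry gets "priority=1050" in its generated yum section, the second
# gets no explicit priority.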
[ ${#repos[@]} -gt 0 ] \
&& ADD_REPO_CMD="sed $ADD_REPO_CMD -i /etc/mock/${DIST}.cfg"
##
[ "$KEEP_CHROOT" == "true" ] \
&& local DOCKER_OPTS="${DOCKER_OPTS} -v ${ROOT_DIR}:/var/lib/mock"
local CLEANUP_SCRIPT=$(cat ${BIN_DIR}/functions/cleanup-functions | base64 -w0)
docker run ${DOCKER_OPTS} --privileged --rm \
-v ${CACHE_DIR}:/srv/mock/cache:ro \
-v ${SOURCE_PATH}:/home/abuild/rpmbuild \
-v ${DEST_PATH}:/home/abuild/buildresult \
${CONTAINER_NAME} \
bash ${BASH_OPTS} -c "
echo ${CLEANUP_SCRIPT} | base64 -d > /cleanup
sed -e 's|%ROOT_NAME%|${ROOT_NAME}|g' \
-e 's|%TYPE%|${TYPE}|g' \
-e 's|%UNAME%|${UNAME}|g' \
-e 's|%GNAME%|${GNAME}|g' \
-i /cleanup
source /cleanup
rm -rf /var/lib/mock/${ROOT_NAME}
mkdir -p /srv/tmpfs/cache
mount -t tmpfs overlay /srv/tmpfs/cache
mount -t aufs -o br=/srv/tmpfs/cache:/srv/mock/cache \
none /var/cache/mock
rm -rf /etc/mock
ln -s /var/cache/mock/configs /etc/mock
${ADD_REPO_CMD}
touch /etc/mock/${DIST}.cfg -r \
/var/cache/mock/${ROOT_NAME}/root_cache/cache.tar.gz
echo 'Current config file:'
cat /etc/mock/${DIST}.cfg
chown -R abuild.mock /home/abuild
rm -rf /var/cache/mock/${ROOT_NAME}/yum_cache
ulimit -n 10000
su - abuild -c 'mock -r ${DIST} ${MOCK_OPTS} --update'
if [ x${KEEP_CHROOT} = xtrue ] ; then
sed -i /etc/mock/${DIST}.cfg -e '/tmpfs_enable/s|True|False|'
mkdir -p /var/lib/mock/${ROOT_NAME}/root
tar -xf /var/cache/mock/${ROOT_NAME}/root_cache/cache.tar.gz \
-C /var/lib/mock/${ROOT_NAME}/root
fi
[ \$(ls /home/abuild/rpmbuild/*.src.rpm 2>/dev/null | wc -l) -eq 0 ] \
&& su - abuild -c 'mock -r ${DIST} --no-clean \
--no-cleanup-after --buildsrpm \
--sources=/home/abuild/rpmbuild \
--spec=\$(ls /home/abuild/rpmbuild/*.spec) \
--resultdir=/home/abuild/rpmbuild ${MOCK_OPTS}'
rm -rf /home/abuild/rpmbuild/build
su - abuild -c 'mock -r ${DIST} --no-clean \
--no-cleanup-after ${MOCK_OPTS} \
--resultdir=/home/abuild/buildresult \
\$(ls /home/abuild/rpmbuild/*.src.rpm)'
echo \$? > /home/abuild/buildresult/exitstatus"
local EXIT_STATUS=$(cat ${DEST_PATH}/exitstatus || echo 1)
if [ "$EXIT_STATUS" -ne 0 ] ; then
fail_exit "BUILD FAILED: returned result is $EXIT_STATUS"
fi
}
docker_build_sbuild() {
find ${SOURCE_PATH} | egrep "(debian/rules|\.dsc)$" &>/dev/null \
|| fail_exit "ERROR: No DEB sources found at ${SOURCE_PATH}"
local SOURCE_TARGET=$(find ${SOURCE_PATH} | egrep "(debian/rules|\.dsc)$" | head -1 2>/dev/null)
[ "${SOURCE_TARGET##*/}" == "rules" ] \
&& SOURCE_TARGET=$(basename $(dirname $(dirname ${SOURCE_TARGET})))/ \
|| SOURCE_TARGET=$(basename ${SOURCE_TARGET})
## Parse additional repositories
for repo_id in $(seq 0 $(( ${#repos[@]} - 1 ))) ; do
ADD_REPO_CMD="${ADD_REPO_CMD} --chroot-setup-commands=\"apt-add-repo deb ${repos[${repo_id}]}\" "
done
## Parse Apt pinning options
unset PIN_CONTENT
if [ ${#pins[@]} -gt 0 ] ; then
for pin_id in $(seq 0 $(( ${#pins[@]} - 1 ))) ; do
local PIN_CONTENT="${PIN_CONTENT}Package: ${pinpkgs[${pin_id}]}\nPin: ${pins[${pin_id}]}\nPin-Priority: ${pinprios[${pin_id}]}\n"
done
fi
unset PIN_CONTENT_BASE64
[ -n "${PIN_CONTENT}" ] \
&& PIN_CONTENT_BASE64=$(echo -e "${PIN_CONTENT}" | base64 -w0)
unset SET_APT_PREFS_CMD
[ -n "${PIN_CONTENT_BASE64}" ] \
&& local SET_APT_PREFS_CMD="--chroot-setup-commands=\"set-apt-prefs ${PIN_CONTENT_BASE64}\""
## Process hooks
local debian_dir=${SOURCE_PATH}/${SOURCE_TARGET}/debian
if [ -d "${debian_dir}/hooks.d" ] ; then
HOOKS_CMD="--chroot-setup-commands=\"run-parts /hooks.d/\""
fi
##
local SBUILD_OPTS="--nolog --source --force-orig-source"
if [ "$KEEP_CHROOT" = "true" ] ; then
local DOCKER_OPTS="${DOCKER_OPTS} -v ${ROOT_DIR}/${ROOT_NAME}:/srv/overlay"
local SBUILD_OPTS="${SBUILD_OPTS} --purge-build=never --purge-session=never"
local SBUILD_OPTS=${SBUILD_OPTS/--force-orig-source/}
fi
local CLEANUP_SCRIPT=$(cat ${BIN_DIR}/functions/cleanup-functions | base64 -w0)
docker run ${DOCKER_OPTS} --privileged --rm \
-v ${CACHE_DIR}:/srv/images:ro \
-v ${SOURCE_PATH}:/srv/source \
-v ${DEST_PATH}:/srv/build \
${CONTAINER_NAME} \
bash ${BASH_OPTS} -c "
echo ${CLEANUP_SCRIPT} | base64 -d > /cleanup
sed -e 's|%ROOT_NAME%|${ROOT_NAME}|g' \
-e 's|%TYPE%|${TYPE}|g' \
-e 's|%UNAME%|${UNAME}|g' \
-e 's|%GNAME%|${GNAME}|g' \
-i /cleanup
source /cleanup
rm -rf /etc/schroot/chroot.d
ln -s /srv/images/chroot.d /etc/schroot/chroot.d
ulimit -n 10000
if [ x$HOOKS_CMD != x ] ; then
echo '/srv/source/${SOURCE_TARGET}/debian/hooks.d /hooks.d none ro,bind 0 0' \
>> /etc/schroot/sbuild/fstab
fi
( sed -i /usr/bin/sbuild -e '/debian\/rules/d'
DEB_BUILD_OPTIONS=nocheck /usr/bin/sbuild -d ${DIST} \
${SBUILD_OPTS} \
${ADD_REPO_CMD} \
${SET_APT_PREFS_CMD} \
${HOOKS_CMD} \
--chroot-setup-commands=\"apt-get update\" \
--chroot-setup-commands=\"apt-get upgrade -f -y --force-yes\" \
/srv/source/${SOURCE_TARGET} 2>&1
echo \$? > /srv/build/exitstatus ) \
| tee /srv/build/buildlog.sbuild
if [ x$KEEP_CHROOT = xtrue ] ; then
rm -rf /srv/overlay/*
SESSION=\$(schroot --info --all-sessions | grep \"^ Name\" | awk '{print \$2}')
cp -R /var/lib/schroot/union/overlay/\${SESSION}/* /srv/overlay
cp -R /var/lib/schroot/mount/\${SESSION}/build /srv/overlay/
fi"
local EXIT_STATUS=$(cat ${DEST_PATH}/exitstatus || echo 1)
if [ "$EXIT_STATUS" -ne 0 ] ; then
fail_exit "BUILD FAILED: returned result is $EXIT_STATUS"
fi
}
docker_shell_mock() {
local CLEANUP_SCRIPT=$(cat ${BIN_DIR}/functions/cleanup-functions | base64 -w0)
local DOCKER_OPTS="${DOCKER_OPTS} -v ${ROOT_DIR}:/var/lib/mock"
docker run ${DOCKER_OPTS} -ti --privileged --rm \
-v ${CACHE_DIR}:/var/cache/mock:ro \
${CONTAINER_NAME} \
bash ${BASH_OPTS} -c "
echo ${CLEANUP_SCRIPT} | base64 -d > /cleanup
sed -e 's|%ROOT_NAME%|${ROOT_NAME}|g' \
-e 's|%TYPE%|${TYPE}|g' \
-e 's|%UNAME%|${UNAME}|g' \
-e 's|%GNAME%|${GNAME}|g' \
-i /cleanup
source /cleanup
mkdir -p /var/lib/mock/${ROOT_NAME}/root/{proc,sys,dev/{shm,pts},var/cache/yum,tmp/ccache}
mount -n -t proc proc /var/lib/mock/${ROOT_NAME}/root/proc
mount -n -t sysfs sysfs /var/lib/mock/${ROOT_NAME}/root/sys
mount -n -t tmpfs tmpfs /var/lib/mock/${ROOT_NAME}/root/dev/shm
mount -n -t devpts -o gid=5,mode=0620,ptmxmode=0666,newinstance devpts /var/lib/mock/${ROOT_NAME}/root/dev/pts
mount -n --bind /var/cache/mock/${ROOT_NAME}/yum_cache/ /var/lib/mock/${ROOT_NAME}/root/var/cache/yum
mount -n --bind /var/cache/mock/${ROOT_NAME}/ccache/u1000/ /var/lib/mock/${ROOT_NAME}/root/tmp/ccache
chroot /var/lib/mock/${ROOT_NAME}/root \
/bin/bash -c 'export LANG=en_US.UTF-8
export HOME=/builddir
export CCACHE_DIR=/tmp/ccache
export CCACHE_UMASK=002
cd \${HOME}/build
bash'"
}
docker_shell_sbuild() {
local DOCKER_OPTS="${DOCKER_OPTS} -v ${ROOT_DIR}:/srv/overlay"
local CLEANUP_SCRIPT=$(cat ${BIN_DIR}/functions/cleanup-functions | base64 -w0)
docker run ${DOCKER_OPTS} -ti --privileged --rm \
-v ${CACHE_DIR}:/srv/images:ro \
-v ${DEST_PATH}:/srv/build \
${CONTAINER_NAME} \
bash ${BASH_OPTS} -c "
echo ${CLEANUP_SCRIPT} | base64 -d > /cleanup
sed -e 's|%ROOT_NAME%|${ROOT_NAME}|g' \
-e 's|%TYPE%|${TYPE}|g' \
-e 's|%UNAME%|${UNAME}|g' \
-e 's|%GNAME%|${GNAME}|g' \
-i /cleanup
source /cleanup
mkdir -p /srv/root
mount -t aufs -o br=/srv/overlay/${ROOT_NAME}:/srv/images/${ROOT_NAME} \
none /srv/root
mount -n -t proc proc /srv/root/proc
mount -n -t sysfs sysfs /srv/root/sys
mount -n -t tmpfs tmpfs /srv/root/dev/shm
mount -n -t devpts -o gid=5,mode=0620,ptmxmode=0666,newinstance devpts /srv/root/dev/pts
chroot /srv/root \
/bin/bash -c '
cd /build/*/
bash'"
}

View File

@ -1,33 +0,0 @@
#!/bin/bash -xe
export LANG=C
function exit_with_error() {
echo "$@"
exit 1
}
function job_lock() {
[ -z "$1" ] && exit_with_error "Lock file is not specified"
local LOCKFILE=$1
shift
local fd=1000
eval "exec $fd>>$LOCKFILE"
case $1 in
"set")
flock -x -n $fd \
|| exit_with_error "Process already running. Lockfile: $LOCKFILE"
;;
"unset")
flock -u $fd
rm -f $LOCKFILE
;;
"wait")
local TIMEOUT=${2:-3600}
echo "Waiting of concurrent process (lockfile: $LOCKFILE, timeout = $TIMEOUT seconds) ..."
flock -x -w $TIMEOUT $fd \
&& echo DONE \
|| exit_with_error "Timeout error (lockfile: $LOCKFILE)"
;;
esac
}

View File

@ -1,157 +0,0 @@
#!/bin/bash
#[ -z "$RESYNCONLY" ] && RESYNCONLY=false
[ -z "$REPO_BASE_PATH" ] && REPO_BASE_PATH=${HOME}/pubrepos
[ -z "$PKG_PATH" ] && echo "ERROR: Remote path to built packages is not defined" && exit 1
WRK_DIR=`pwd`
TMP_DIR=${WRK_DIR}/.tmpdir
error () {
echo
echo -e "ERROR: $*"
echo
exit 1
}
info () {
echo
echo -e "INFO: $*"
echo
}
_sigul () {
local PASSWD=$1
shift
printf '%s\0' "$PASSWD" | sigul --batch $@
}
check-gpg() {
local RESULT=0
[ -z "$SIGKEYID" ] && echo "WARNING: No secret keys given" && RESULT=1
# Test secret keys
[ $RESULT -eq 0 ] && [ `gpg --list-secret-keys | grep ^sec | grep -c "$SIGKEYID"` -eq 0 ] && error "No secret keys found"
# Check for password
if [ $RESULT -eq 0 ] ; then
timeout 5s bash -c "echo test | gpg -q --no-tty --batch --no-verbose --local-user $SIGKEYID -so - &>/dev/null" \
|| error "Unable to sign with $SIGKEYID key. Passphrase needed!"
fi
[ $RESULT -ne 0 ] && echo "WARNING: Fall back to unsigned mode"
return $RESULT
}
check-sigul() {
local SIGKEYID=$1
local SIGUL_USER=$2
local SIGUL_ADMIN_PASSWD=$3
local RESULT=0
# Test for the secret key and the sigul definition
[ -z "$SIGKEYID" ] && echo "WARNING: No secret keys given" && RESULT=1
[ -z "$SIGUL_USER" ] && echo "WARNING: No Sigul user given" && RESULT=1
[ -z "$SIGUL_ADMIN_PASSWD" ] && echo "WARNING: No Sigul Administration's password given" && RESULT=1
[ -z "$(which sigul)" ] && echo "WARNING: Sigul is not found" && RESULT=1
# Test of sigul or secret key availability
if [ $RESULT -eq 0 ] ; then
retry -c4 -s1 _sigul "$SIGUL_ADMIN_PASSWD" -u "$SIGUL_USER" list-keys > keys_list.tmp
[ $? -ne 0 ] && echo "WARNING: Something went wrong" && RESULT=1
fi
[ $RESULT -eq 0 ] && [ $(grep -c "$SIGKEYID" keys_list.tmp) -ne 1 ] && RESULT=1
[ $RESULT -ne 0 ] && echo "WARNING:No secret keys found or Sigul is unavailable. Fall back to local signed"
return $RESULT
}
retry() {
local count=3
local sleep=5
local optname
while getopts 'c:s:' optname
do
case $optname in
c) count=$OPTARG ;;
s) sleep=$OPTARG ;;
?) return 1 ;;
esac
done
shift $((OPTIND - 1))
local ec
while true
do
"$@" && true
ec=$?
(( count-- ))
if [[ $ec -eq 0 || $count -eq 0 ]]
then
break
else
sleep "$sleep"
fi
done
return "$ec"
}
sync-repo() {
local LOCAL_DIR=$1
local REMOTE_DIR=$2
local REQUEST_PATH_PREFIX=$3
[ -n "$4" ] && local REQUEST_NUM=$4
[ -n "$5" ] && local LP_BUG=$5
RSYNC_USER=${RSYNC_USER:-"mirror-sync"}
[ -z "$REMOTE_REPO_HOST" ] && error "Remote host to sync is not defined."
[ ! -d "${LOCAL_DIR}" ] && error "Repository ${LOCAL_DIR} doesn't exist!"
## SYNC
source $(dirname `readlink -e $0`)/functions/rsync_functions.sh
mirrors_fail=""
for host in $REMOTE_REPO_HOST; do
# sync files to remote host
# $1 - remote host
# $2 - rsync user
# $3 - local dir
# $4 - remote dir
if [ "$GERRIT_CHANGE_STATUS" == "NEW" ] ; then
rsync_create_dir $host $RSYNC_USER ${REQUEST_PATH_PREFIX}
if [ -n "$LP_BUG" ] && [ -n "$REQUEST_NUM" ] ; then
# Remove the existing REQUEST_NUM repository and recreate it as a symlink to the LP_BUG one
if [ $(rsync_list_links $host $RSYNC_USER ${REQUEST_PATH_PREFIX} | grep -c "^${REQUEST_NUM} ") -eq 0 ] ; then
rsync_delete_dir $host $RSYNC_USER ${REQUEST_PATH_PREFIX}${REQUEST_NUM}
else
rsync_delete_file $host $RSYNC_USER ${REQUEST_PATH_PREFIX}${REQUEST_NUM}
fi
rsync_create_symlink $host $RSYNC_USER ${REQUEST_PATH_PREFIX}${REQUEST_NUM} ${LP_BUG}
REMOTE_DIR=${REQUEST_PATH_PREFIX}${LP_BUG}/${REMOTE_DIR}
else
# The symlinked REQUEST_NUM repository should be removed so that it does not affect the LP_BUG one
[ $(rsync_list_links $host $RSYNC_USER ${REQUEST_PATH_PREFIX} | grep -c "^${REQUEST_NUM} ") -gt 0 ] \
&& rsync_delete_file $host $RSYNC_USER ${REQUEST_PATH_PREFIX}${REQUEST_NUM}
REMOTE_DIR=${REQUEST_PATH_PREFIX}${REQUEST_NUM}/${REMOTE_DIR}
fi
elif [ -n "$REQUEST_PATH_PREFIX" ] ; then
# Remove unused request repos
if [ -n "$REQUEST_NUM" ] ; then
if [ $(rsync_list_links $host $RSYNC_USER ${REQUEST_PATH_PREFIX} | grep -c "^${REQUEST_NUM} ") -eq 0 ] ; then
rsync_delete_dir $host $RSYNC_USER ${REQUEST_PATH_PREFIX}${REQUEST_NUM}
else
rsync_delete_file $host $RSYNC_USER ${REQUEST_PATH_PREFIX}${REQUEST_NUM}
fi
[ $(rsync_list_files $host $RSYNC_USER ${REQUEST_PATH_PREFIX} | grep -cF $REQUEST_NUM) -gt 0 ] \
&& rsync_delete_file $host $RSYNC_USER ${REQUEST_PATH_PREFIX}${REQUEST_NUM}.target.txt
fi
# Do not remove the LP_BUG repo until all linked repos are removed
[ -n "$LP_BUG" ] \
&& [ $(rsync_list_links $host $RSYNC_USER ${REQUEST_PATH_PREFIX} | grep -cF $LP_BUG) -eq 0 ] \
&& rsync_delete_dir $host $RSYNC_USER ${REQUEST_PATH_PREFIX}/$LP_BUG
fi
rsync_transfer $host $RSYNC_USER $LOCAL_DIR $REMOTE_DIR || mirrors_fail+=" ${host}"
done
#if [[ -n "$mirrors_fail" ]]; then
# echo Some mirrors failed to update: $mirrors_fail
# exit 1
#else
# export MIRROR_VERSION="${TGTDIR}"
# export MIRROR_BASE="http://$RSYNCHOST_MSK/fwm/files/${MIRROR_VERSION}"
# echo "MIRROR = ${mirror}" > ${WORKSPACE:-"."}/mirror_staging.txt
# echo "MIRROR_VERSION = ${MIRROR_VERSION}" >> ${WORKSPACE:-"."}/mirror_staging.txt
# echo "MIRROR_BASE = $MIRROR_BASE" >> ${WORKSPACE:-"."}/mirror_staging.txt
# echo "FUEL_MAIN_BRANCH = ${FUEL_MAIN_BRANCH}" >> ${WORKSPACE:-"."}/mirror_staging.txt
# echo "Updated: ${MIRROR_VERSION}<br> <a href='http://mirror.fuel-infra.org//${FILESROOT}/${TGTDIR}'>ext</a> <a href='http://${RSYNCHOST_MSK}/${FILESROOT}/${TGTDIR}'>msk</a> <a href='http://${RSYNCHOST_SRT}/${FILESROOT}/${TGTDIR}'>srt</a> <a href='http://${RSYNCHOST_KHA}/${FILESROOT}/${TGTDIR}'>kha</a>"
#fi
}

View File

@ -1,193 +0,0 @@
#!/bin/bash -xe
export LANG=C
# define this vars before use
SNAPSHOT_FOLDER=${SNAPSHOT_FOLDER:-"snapshots"}
LATESTSUFFIX=${LATESTSUFFIX:-"-latest"}
export DATE=$(date "+%Y-%m-%d-%H%M%S")
export SAVE_LAST_DAYS=${SAVE_LAST_DAYS:-61}
export WARN_DATE=$(date "+%Y%m%d" -d "$SAVE_LAST_DAYS days ago")
function get_empty_dir() {
echo $(mktemp -d)
}
function get_symlink() {
local LINKDEST=$1
local LINKNAME=$(mktemp -u)
ln -s --force $LINKDEST $LINKNAME && echo $LINKNAME
}
function rsync_delete_file() {
local RSYNCHOST=$1
local RSYNCUSER=$2
local FILENAME=$(basename $3)
local FILEPATH=$(dirname $3)
local EMPTYDIR=$(get_empty_dir)
rsync -rv --delete --include=$FILENAME '--exclude=*' \
$EMPTYDIR/ $RSYNCHOST::$RSYNCUSER/$FILEPATH/
[ ! -z "$EMPTYDIR" ] && rm -rf $EMPTYDIR
}
function rsync_delete_dir() {
local RSYNCHOST=$1
local RSYNCUSER=$2
local DIR=$3
local EMPTYDIR=$(get_empty_dir)
rsync --delete -a $EMPTYDIR/ $RSYNCHOST::$RSYNCUSER/$DIR/ \
&& rsync_delete_file $RSYNCHOST $RSYNCUSER $DIR
[ ! -z "$EMPTYDIR" ] && rm -rf $EMPTYDIR
}
function rsync_create_dir() {
local RSYNCHOST=$1
local RSYNCUSER=$2
local DIR=$3
local EMPTYDIR=$(get_empty_dir)
local OIFS="$IFS"
IFS='/'
local dir=''
local _dir=''
for _dir in $DIR ; do
IFS="$OIFS"
dir="${dir}/${_dir}"
rsync -a $EMPTYDIR/ $RSYNCHOST::$RSYNCUSER/$dir/
IFS='/'
done
IFS="$OIFS"
[ ! -z "$EMPTYDIR" ] && rm -rf $EMPTYDIR
}
function rsync_create_symlink() {
# Create symlink $3 -> $4
# E.g. "create_symlink repos/6.1 files/6.1-stable"
# will create the symlink repos/6.1 -> repos/files/6.1-stable
local RSYNCHOST=$1
local RSYNCUSER=$2
local LINKNAME=$3
local LINKDEST=$4
local SYMLINK_FILE=$(get_symlink "$LINKDEST")
rsync -vl $SYMLINK_FILE $RSYNCHOST::$RSYNCUSER/$LINKNAME
rm $SYMLINK_FILE
# Make a text file for dereferencing the symlink
local TARGET_TXT_FILE=$(mktemp)
echo "$LINKDEST" > $TARGET_TXT_FILE
rsync -vl $TARGET_TXT_FILE $RSYNCHOST::$RSYNCUSER/${LINKNAME}.target.txt
rm $TARGET_TXT_FILE
}
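# For illustration only (host, module and paths are hypothetical):
#   rsync_create_symlink mirror.example.org mirror-sync repos/6.1 files/6.1-stable
# uploads two objects: the symlink repos/6.1 -> files/6.1-stable and the
# plain-text dereference file repos/6.1.target.txt containing "files/6.1-stable".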
function rsync_list() {
local RSYNCHOST=$1
local RSYNCUSER=$2
local DIR=$3
local TEMPFILE=$(mktemp)
set +e
rsync -l $RSYNCHOST::$RSYNCUSER/$DIR/ 2>/dev/null > $TEMPFILE
local RESULT=$?
[ "$RESULT" == "0" ] && cat $TEMPFILE | grep -v '\.$'
rm $TEMPFILE
set -e
return $RESULT
}
function rsync_list_links() {
local RSYNCHOST=$1
local RSYNCUSER=$2
local DIR=$3
local TEMPFILE=$(mktemp)
set +e
rsync_list $RSYNCHOST $RSYNCUSER $DIR > $TEMPFILE
local RESULT=$?
[ "$RESULT" == "0" ] && cat $TEMPFILE | grep '^l' | awk '{print $(NF-2)" "$NF}'
rm $TEMPFILE
set -e
return $RESULT
}
function rsync_list_dirs() {
local RSYNCHOST=$1
local RSYNCUSER=$2
local DIR=$3
local TEMPFILE=$(mktemp)
set +e
rsync_list $RSYNCHOST $RSYNCUSER $DIR > $TEMPFILE
local RESULT=$?
[ "$RESULT" == "0" ] && cat $TEMPFILE | grep '^d' | awk '{print $NF}'
rm $TEMPFILE
set -e
return $RESULT
}
function rsync_list_files() {
local RSYNCHOST=$1
local RSYNCUSER=$2
local DIR=$3
local TEMPFILE=$(mktemp)
set +e
rsync_list $RSYNCHOST $RSYNCUSER ${DIR} > $TEMPFILE
local RESULT=$?
[ "$RESULT" == "0" ] && cat $TEMPFILE | grep -vE '^d|^l' | awk '{print $NF}'
rm $TEMPFILE
set -e
return $RESULT
}
######################################################
function rsync_remove_old_versions() {
# Remove mirrors older than $SAVE_LAST_DAYS days that have no symlinks pointing to them
local RSYNCHOST=$1
local RSYNCUSER=$2
local REMOTEPATH=$3
local FOLDERNAME=$4
DIRS=$(rsync_list_dirs $RSYNCHOST $RSYNCUSER $REMOTEPATH | grep "^$FOLDERNAME\-" )
for dir in $DIRS; do
ddate=$(echo $dir | awk -F '[-]' '{print $(NF-3)$(NF-2)$(NF-1)}')
[ "$ddate" -gt "$WARN_DATE" ] && continue
LINKS=$(rsync_list_links $RSYNCHOST $RSYNCUSER $REMOTEPATH | grep -F $dir ; rsync_list_links $RSYNCHOST $RSYNCUSER $(dirname $REMOTEPATH) | grep -F "$(basename $REMOTEPATH)/$dir")
if [ "$LINKS" = "" ]; then
rsync_delete_dir $RSYNCHOST $RSYNCUSER $REMOTEPATH/$dir
continue
fi
echo "Skip because symlinks $LINKS points to $dir"
done
}
######################################################
function rsync_transfer() {
# sync files to remote host
# $1 - remote host
# $2 - rsync module
# $3 - source dir 1/
# $4 - remote dir 1/2/3/4/5
# snapshots dir 1/2/3/4/snapshots
local RSYNC_HOST=$1
local RSYNC_USER=$2
local SOURCE_DIR=$3
local REMOTE_DIR=$4
local SNAPSHOT_DIR=$(echo $REMOTE_DIR | sed "s|$(basename ${REMOTE_DIR})$|${SNAPSHOT_FOLDER}|")
local SNAPSHOT_FOLDER=$(basename $SNAPSHOT_DIR) # snapshots
local SNAPSHOT_PATH=$(dirname $SNAPSHOT_DIR) # 1/2
local REMOTE_ROOT=$(echo $REMOTE_DIR | sed "s|^$SNAPSHOT_PATH/||")
local REMOTE_ROOT=${REMOTE_ROOT%%/*} # 3
rsync_list_dirs $RSYNC_HOST $RSYNC_USER $SNAPSHOT_DIR/${REMOTE_ROOT}-${DATE} \
|| rsync_create_dir $RSYNC_HOST $RSYNC_USER $SNAPSHOT_DIR/${REMOTE_ROOT}-${DATE}
OPTIONS="--archive --verbose --force --ignore-errors --delete-excluded --no-owner --no-group \
--delete --link-dest=/${SNAPSHOT_DIR}/${REMOTE_ROOT}${LATESTSUFFIX}"
rsync ${OPTIONS} ${SOURCE_DIR}/ ${RSYNC_HOST}::${RSYNC_USER}/${SNAPSHOT_DIR}/${REMOTE_ROOT}-${DATE}/ \
&& rsync_delete_file $RSYNC_HOST $RSYNC_USER ${SNAPSHOT_DIR}/${REMOTE_ROOT}${LATESTSUFFIX} \
&& rsync_create_symlink $RSYNC_HOST $RSYNC_USER ${SNAPSHOT_DIR}/${REMOTE_ROOT}${LATESTSUFFIX} ${REMOTE_ROOT}-${DATE} \
&& rsync_delete_file $RSYNC_HOST $RSYNC_USER ${SNAPSHOT_PATH}/${REMOTE_ROOT} \
&& rsync_create_symlink $RSYNC_HOST $RSYNC_USER ${SNAPSHOT_PATH}/${REMOTE_ROOT} ${SNAPSHOT_FOLDER}/${REMOTE_ROOT}-${DATE} \
&& rsync_remove_old_versions $RSYNC_HOST $RSYNC_USER ${SNAPSHOT_DIR} ${REMOTE_ROOT}
RESULT=$?
[ $RESULT -ne 0 ] && rsync_delete_dir $RSYNC_HOST $RSYNC_USER ${SNAPSHOT_DIR}/${REMOTE_ROOT}-${DATE}
return $RESULT
}
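A worked example of the path mapping above (paths and date are illustrative): with REMOTE_DIR=1/2/3/4/5 and DATE=2016-01-01-120000, the function behaves roughly as follows:

    # data is rsynced (hard-linking unchanged files against the previous snapshot) to:
    1/2/3/4/snapshots/5-2016-01-01-120000/
    # then the "latest" symlink and the published path are repointed:
    1/2/3/4/snapshots/5-latest -> 5-2016-01-01-120000
    1/2/3/4/5                  -> snapshots/5-2016-01-01-120000
    # finally, snapshots older than SAVE_LAST_DAYS days with no symlinks
    # pointing to them are pruned.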

View File

@ -1,257 +0,0 @@
#!/bin/bash -ex
[ -f ".publisher-defaults-deb" ] && source .publisher-defaults-deb
source $(dirname $(readlink -e $0))/functions/publish-functions.sh
source $(dirname $(readlink -e $0))/functions/locking.sh
main() {
local SIGN_STRING=""
if check-sigul "$SIGKEYID" "$SIGUL_USER" "$SIGUL_ADMIN_PASSWD" ; then
USE_SIGUL="true"
SIGN_STRING="true"
else
check-gpg && SIGN_STRING="true"
fi
## Download sources from worker
[ -d $TMP_DIR ] && rm -rf $TMP_DIR
mkdir -p $TMP_DIR
rsync -avPzt \
-e "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ${SSH_OPTS}" \
${SSH_USER}${BUILD_HOST}:${PKG_PATH}/ ${TMP_DIR}/ || error "Can't download packages"
local DEB_BINARIES
DEB_BINARIES=$(find "$TMP_DIR" -name "*.deb" | awk -F'/' '{print $NF}' | cut -d'_' -f1 | tr '\n' ',')
## Resign source package
## FixMe: disabled pending discussion: does the source package really need to be signed?
#[ -n "${SIGN_STRING}" ] && \
# for _dscfile in $(find ${TMP_DIR} -name "*.dsc") ; do
# debsign -pgpg --re-sign -k${SIGKEYID} ${_dscfile}
# done
# Create all repositories
# Paths
if [ -n "${CUSTOM_REPO_ID}" ] ; then
unset LP_BUG
REQUEST_NUM=${CUSTOM_REPO_ID}
fi
local URL_PREFIX=""
if [ "${GERRIT_CHANGE_STATUS}" = "NEW" ] ; then
REPO_BASE_PATH=${REPO_BASE_PATH}/${REPO_REQUEST_PATH_PREFIX}
URL_PREFIX=${REPO_REQUEST_PATH_PREFIX}
if [ -n "${LP_BUG}" ] ; then
REPO_BASE_PATH=${REPO_BASE_PATH}${LP_BUG}
URL_PREFIX=${URL_PREFIX}${LP_BUG}/
else
REPO_BASE_PATH=${REPO_BASE_PATH}${REQUEST_NUM}
URL_PREFIX=${URL_PREFIX}${REQUEST_NUM}/
fi
fi
# Repos
DEB_UPDATES_DIST_NAME=${DEB_UPDATES_DIST_NAME:-$DEB_DIST_NAME}
DEB_PROPOSED_DIST_NAME=${DEB_PROPOSED_DIST_NAME:-$DEB_DIST_NAME}
DEB_SECURITY_DIST_NAME=${DEB_SECURITY_DIST_NAME:-$DEB_DIST_NAME}
DEB_HOLDBACK_DIST_NAME=${DEB_HOLDBACK_DIST_NAME:-$DEB_DIST_NAME}
DEB_HOTFIX_DIST_NAME=${DEB_HOTFIX_DIST_NAME:-hotfix}
DEB_UPDATES_COMPONENT=${DEB_UPDATES_COMPONENT:-$DEB_COMPONENT}
DEB_PROPOSED_COMPONENT=${DEB_PROPOSED_COMPONENT:-$DEB_COMPONENT}
DEB_SECURITY_COMPONENT=${DEB_SECURITY_COMPONENT:-$DEB_COMPONENT}
DEB_HOLDBACK_COMPONENT=${DEB_HOLDBACK_COMPONENT:-$DEB_COMPONENT}
DEB_HOTFIX_COMPONENT=${DEB_HOTFIX_COMPONENT:-$DEB_COMPONENT}
local LOCAL_REPO_PATH=${REPO_BASE_PATH}/${DEB_REPO_PATH}
local DBDIR="+b/db"
local CONFIGDIR="${LOCAL_REPO_PATH}/conf"
local DISTDIR="${LOCAL_REPO_PATH}/public/dists/"
local OUTDIR="+b/public/"
if [ ! -d "${CONFIGDIR}" ] ; then
mkdir -p ${CONFIGDIR}
job_lock ${CONFIGDIR}.lock wait 3600
for dist_name in ${DEB_DIST_NAME} ${DEB_PROPOSED_DIST_NAME} \
${DEB_UPDATES_DIST_NAME} ${DEB_SECURITY_DIST_NAME} \
${DEB_HOLDBACK_DIST_NAME} ${DEB_HOTFIX_DIST_NAME} ; do
cat >> ${CONFIGDIR}/distributions <<- EOF
Origin: ${ORIGIN}
Label: ${DEB_DIST_NAME}
Suite: ${dist_name}
Codename: ${dist_name}
Version: ${PRODUCT_VERSION}
Architectures: amd64 i386 source
Components: main restricted
UDebComponents: main restricted
Contents: . .gz .bz2
EOF
reprepro --basedir ${LOCAL_REPO_PATH} --dbdir ${DBDIR} \
--outdir ${OUTDIR} --distdir ${DISTDIR} --confdir ${CONFIGDIR} \
export ${dist_name}
# Fix Codename field
local release_file="${DISTDIR}/${dist_name}/Release"
sed "s|^Codename:.*$|Codename: ${DEB_DIST_NAME}|" \
-i ${release_file}
rm -f ${release_file}.gpg
# Re-sign the Release file
[ -n "${SIGN_STRING}" ] \
&& gpg --sign --digest-algo SHA512 --local-user ${SIGKEYID} -ba \
-o ${release_file}.gpg ${release_file}
done
job_lock ${CONFIGDIR}.lock unset
fi
DEB_BASE_DIST_NAME=${DEB_DIST_NAME}
if [ "${IS_UPDATES}" = 'true' ] ; then
DEB_DIST_NAME=${DEB_PROPOSED_DIST_NAME}
DEB_COMPONENT=${DEB_PROPOSED_COMPONENT}
fi
if [ "${IS_HOLDBACK}" = 'true' ] ; then
DEB_DIST_NAME=${DEB_HOLDBACK_DIST_NAME}
DEB_COMPONENT=${DEB_HOLDBACK_COMPONENT}
fi
if [ "${IS_SECURITY}" = 'true' ] ; then
DEB_DIST_NAME=${DEB_SECURITY_DIST_NAME}
DEB_COMPONENT=${DEB_SECURITY_COMPONENT}
fi
if [ "${IS_HOTFIX}" = 'true' ] ; then
DEB_DIST_NAME=${DEB_HOTFIX_DIST_NAME}
DEB_COMPONENT=${DEB_HOTFIX_COMPONENT}
fi
[ -z "${DEB_COMPONENT}" ] && local DEB_COMPONENT=main
[ "${IS_RESTRICTED}" = 'true' ] && DEB_COMPONENT=restricted
local LOCAL_REPO_PATH=${REPO_BASE_PATH}/${DEB_REPO_PATH}
local CONFIGDIR="${LOCAL_REPO_PATH}/conf"
local DBDIR="+b/db"
local DISTDIR="${LOCAL_REPO_PATH}/public/dists/"
local OUTDIR="${LOCAL_REPO_PATH}/public/"
local REPREPRO_OPTS="--verbose --basedir ${LOCAL_REPO_PATH} --dbdir ${DBDIR} \
--outdir ${OUTDIR} --distdir ${DISTDIR} --confdir ${CONFIGDIR}"
local REPREPRO_COMP_OPTS="${REPREPRO_OPTS} --component ${DEB_COMPONENT}"
# Parse incoming files
local BINDEBLIST=""
local BINDEBNAMES=""
local BINUDEBLIST=""
local BINSRCLIST=""
for binary in ${TMP_DIR}/* ; do
case ${binary##*.} in
deb) BINDEBLIST="${BINDEBLIST} ${binary}"
BINDEBNAMES="${BINDEBNAMES} ${binary##*/}"
;;
udeb) BINUDEBLIST="${BINUDEBLIST} ${binary}" ;;
dsc) BINSRCLIST="${binary}" ;;
esac
done
job_lock ${CONFIGDIR}.lock wait 3600
local SRC_NAME=$(awk '/^Source:/ {print $2}' ${BINSRCLIST})
local NEW_VERSION=$(awk '/^Version:/ {print $2}' ${BINSRCLIST} | head -n 1)
local OLD_VERSION=$(reprepro ${REPREPRO_OPTS} --list-format '${version}\n' \
listfilter ${DEB_DIST_NAME} "Package (==${SRC_NAME})" | sort -u | head -n 1)
[ "${OLD_VERSION}" == "" ] && OLD_VERSION=none
# Remove existing packages for requests-on-review and downgrades
# TODO: Get rid of removing. Just increase version properly
if [ "${GERRIT_CHANGE_STATUS}" = "NEW" -o "$IS_DOWNGRADE" == "true" ] ; then
reprepro ${REPREPRO_OPTS} removesrc ${DEB_DIST_NAME} ${SRC_NAME} ${OLD_VERSION} || :
fi
# Add .deb binaries
if [ "${BINDEBLIST}" != "" ]; then
reprepro ${REPREPRO_COMP_OPTS} includedeb ${DEB_DIST_NAME} ${BINDEBLIST} \
|| error "Can't include packages"
fi
# Add .udeb binaries
if [ "${BINUDEBLIST}" != "" ]; then
reprepro ${REPREPRO_COMP_OPTS} includeudeb ${DEB_DIST_NAME} ${BINUDEBLIST} \
|| error "Can't include packages"
fi
# Replace sources
# TODO: Get rid of replacing. Just increase version properly
if [ "${BINSRCLIST}" != "" ]; then
for dist_name in ${DEB_BASE_DIST_NAME} ${DEB_PROPOSED_DIST_NAME} \
${DEB_UPDATES_DIST_NAME} ${DEB_SECURITY_DIST_NAME} \
${DEB_HOLDBACK_DIST_NAME} ${DEB_HOTFIX_DIST_NAME}; do
reprepro ${REPREPRO_COMP_OPTS} --architecture source \
remove ${dist_name} ${SRC_NAME} || :
# Fix Codename field and resign Release file if necessary
local _release_file=${DISTDIR}/${dist_name}/Release
local _inrelease_file=${DISTDIR}/${dist_name}/InRelease
if ! gpg --verify "${_release_file}.gpg" "$_release_file" &>/dev/null ; then
sed "s|^Codename:.*$|Codename: ${DEB_BASE_DIST_NAME}|" -i "$_release_file"
if [ "$USE_SIGUL" = "true" ] ; then
retry -c4 -s1 _sigul "$KEY_PASSPHRASE" -u "$SIGUL_USER" sign-data --armor -o "${_release_file}.gpg" "$SIGKEYID" "$_release_file"
retry -c4 -s1 _sigul "$KEY_PASSPHRASE" -u "$SIGUL_USER" sign-text -o "$_inrelease_file" "$SIGKEYID" "$_release_file"
else
gpg --sign --digest-algo SHA512 --local-user "$SIGKEYID" -ba -o "${_release_file}.gpg" "$_release_file"
gpg --sign --digest-algo SHA512 --local-user "$SIGKEYID" --clearsign -o "$_inrelease_file" "$_release_file"
fi
fi
done
reprepro ${REPREPRO_COMP_OPTS} includedsc ${DEB_DIST_NAME} ${BINSRCLIST} \
|| error "Can't include packages"
fi
# Cleanup files from previous version
[ "${OLD_VERSION}" != "${NEW_VERSION}" ] \
&& reprepro ${REPREPRO_OPTS} removesrc ${DEB_DIST_NAME} ${SRC_NAME} ${OLD_VERSION}
# Fix Codename field
local release_file="${DISTDIR}/${DEB_DIST_NAME}/Release"
local inrelease_file="${DISTDIR}/${DEB_DIST_NAME}/InRelease"
sed "s|^Codename:.*$|Codename: ${DEB_BASE_DIST_NAME}|" -i ${release_file}
# Re-sign the Release file
rm -f "${release_file}.gpg" "$inrelease_file"
local pub_key_file="${LOCAL_REPO_PATH}/public/archive-${PROJECT_NAME}${PROJECT_VERSION}.key"
if [ -n "${SIGN_STRING}" ] ; then
[ ! -f "${pub_key_file}" ] && touch ${pub_key_file}
if [ "${USE_SIGUL}" = "true" ] ; then
retry -c4 -s1 _sigul "$KEY_PASSPHRASE" -u "$SIGUL_USER" sign-data --armor -o "${release_file}.gpg" "${SIGKEYID}" "${release_file}"
retry -c4 -s1 _sigul "$KEY_PASSPHRASE" -u "$SIGUL_USER" sign-text -o "$inrelease_file" "$SIGKEYID" "$release_file"
retry -c4 -s1 _sigul "$KEY_PASSPHRASE" -u "$SIGUL_ADMIN" get-public-key "${SIGKEYID}" > "${pub_key_file}.tmp"
else
gpg --sign --digest-algo SHA512 --local-user "$SIGKEYID" -ba -o "${release_file}.gpg" "$release_file"
gpg --sign --digest-algo SHA512 --local-user "$SIGKEYID" --clearsign -o "$inrelease_file" "$release_file"
gpg -o "${pub_key_file}.tmp" --armor --export "$SIGKEYID"
fi
if diff -q ${pub_key_file} ${pub_key_file}.tmp &>/dev/null ; then
rm ${pub_key_file}.tmp
else
mv ${pub_key_file}.tmp ${pub_key_file}
fi
else
rm -f ${pub_key_file}
fi
# Publish yaml files
local yaml_file=$(find "$TMP_DIR" -name '*.yaml' 2>/dev/null)
if [ -n "$yaml_file" ] ; then
local yaml_dir=${OUTDIR}/${DEB_DIST_NAME}-versioner
mkdir -p "$yaml_dir"
cp -v "$yaml_file" "$yaml_dir"
fi
sync-repo ${OUTDIR} ${DEB_REPO_PATH} ${REPO_REQUEST_PATH_PREFIX} ${REQUEST_NUM} ${LP_BUG}
job_lock ${CONFIGDIR}.lock unset
rm -f ${WRK_DIR}/deb.publish.setenvfile
cat > ${WRK_DIR}/deb.publish.setenvfile <<-EOF
DEB_PUBLISH_SUCCEEDED=true
DEB_DISTRO=${DIST}
DEB_REPO_URL="http://${REMOTE_REPO_HOST}/${URL_PREFIX}${DEB_REPO_PATH} ${DEB_DIST_NAME} ${DEB_COMPONENT}"
DEB_PACKAGENAME=${SRC_NAME}
DEB_VERSION=${NEW_VERSION}
DEB_BINARIES=$DEB_BINARIES
DEB_CHANGE_REVISION=${GERRIT_PATCHSET_REVISION}
LP_BUG=${LP_BUG}
EOF
}
main "$@"
exit 0
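For reference, a minimal consumer-side sketch (not part of the original script). It assumes the archive key ends up next to the published dists/ tree, as the pub_key_file handling above suggests, and that every ${...} placeholder is replaced with a real value:

# Illustrative only; DEB_REPO_URL above already carries "<dist> <component>".
echo "deb http://${REMOTE_REPO_HOST}/${URL_PREFIX}${DEB_REPO_PATH} ${DEB_DIST_NAME} ${DEB_COMPONENT}" \
    | sudo tee /etc/apt/sources.list.d/perestroika.list
wget -qO - "http://${REMOTE_REPO_HOST}/${URL_PREFIX}${DEB_REPO_PATH}/archive-${PROJECT_NAME}${PROJECT_VERSION}.key" \
    | sudo apt-key add -
sudo apt-get update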


@ -1,279 +0,0 @@
#!/bin/bash -ex
[ -f ".publisher-defaults-rpm" ] && source .publisher-defaults-rpm
source $(dirname $(readlink -e $0))/functions/publish-functions.sh
source $(dirname $(readlink -e $0))/functions/locking.sh
[ -z "${DEFAULTCOMPSXML}" ] && DEFAULTCOMPSXML=http://mirror.fuel-infra.org/fwm/6.0/centos/os/x86_64/comps.xml
main() {
if [ -n "${SIGKEYID}" ] ; then
# Check that sigul is available and the signing key exists
[ -n "${SIGUL_USER}" ] && check-sigul "$SIGKEYID" "$SIGUL_USER" "$SIGUL_ADMIN_PASSWD" && USE_SIGUL=true
if [ "${USE_SIGUL}" = "true" ] ; then
# Use sigul for sign
retry -c4 -s1 _sigul "$SIGUL_ADMIN_PASSWD" -u "$SIGUL_ADMIN" get-public-key "$SIGKEYID" > RPM-GPG-KEY
else
# Use local sign
check-gpg || :
gpg --export -a ${SIGKEYID} > RPM-GPG-KEY
if [ $(rpm -qa | grep gpg-pubkey | grep -ci ${SIGKEYID}) -eq 0 ]; then
rpm --import RPM-GPG-KEY
fi
fi
fi
# Get built binaries
[ -d ${TMP_DIR} ] && rm -rf ${TMP_DIR}
mkdir -p ${TMP_DIR}
rsync -avPzt -e "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ${SSH_OPTS}" \
${SSH_USER}${BUILD_HOST}:${PKG_PATH}/ ${TMP_DIR}/ || error "Can't download packages"
[ $(ls -1 ${TMP_DIR}/ | wc -l) -eq 0 ] && error "Can't download packages"
## Prepare repository
if [ -n "${CUSTOM_REPO_ID}" ] ; then
unset LP_BUG
REQUEST_NUM=${CUSTOM_REPO_ID}
fi
local URL_PREFIX=''
if [ "${GERRIT_CHANGE_STATUS}" == "NEW" ] ; then
REPO_BASE_PATH=${REPO_BASE_PATH}/${REPO_REQUEST_PATH_PREFIX}
URL_PREFIX=${REPO_REQUEST_PATH_PREFIX}
if [ -n "${LP_BUG}" ] ; then
REPO_BASE_PATH=${REPO_BASE_PATH}${LP_BUG}
URL_PREFIX=${URL_PREFIX}${LP_BUG}/
else
REPO_BASE_PATH=${REPO_BASE_PATH}${REQUEST_NUM}
URL_PREFIX=${URL_PREFIX}${REQUEST_NUM}/
fi
fi
# Create all repositories
RPM_UPDATES_REPO_PATH=${RPM_UPDATES_REPO_PATH:-$RPM_OS_REPO_PATH}
RPM_PROPOSED_REPO_PATH=${RPM_PROPOSED_REPO_PATH:-$RPM_OS_REPO_PATH}
RPM_SECURITY_REPO_PATH=${RPM_SECURITY_REPO_PATH:-$RPM_OS_REPO_PATH}
RPM_HOLDBACK_REPO_PATH=${RPM_HOLDBACK_REPO_PATH:-$RPM_OS_REPO_PATH}
RPM_HOTFIX_REPO_PATH=${RPM_HOTFIX_REPO_PATH:-${RPM_OS_REPO_PATH%/*}/hotfix}
for repo_path in ${RPM_OS_REPO_PATH} ${RPM_PROPOSED_REPO_PATH} \
${RPM_UPDATES_REPO_PATH} ${RPM_SECURITY_REPO_PATH} \
${RPM_HOLDBACK_REPO_PATH} ${RPM_HOTFIX_REPO_PATH} ; do
local LOCAL_REPO_PATH=${REPO_BASE_PATH}/${repo_path}
if [ ! -d "${LOCAL_REPO_PATH}" ] ; then
mkdir -p ${LOCAL_REPO_PATH}/{x86_64/Packages,Source/SPackages,x86_64/repodata}
job_lock ${LOCAL_REPO_PATH}.lock wait 3600
createrepo --pretty --database --update -o ${LOCAL_REPO_PATH}/x86_64/ ${LOCAL_REPO_PATH}/x86_64
createrepo --pretty --database --update -o ${LOCAL_REPO_PATH}/Source/ ${LOCAL_REPO_PATH}/Source
job_lock ${LOCAL_REPO_PATH}.lock unset
fi
done
RPM_REPO_PATH=${RPM_OS_REPO_PATH}
[ "${IS_UPDATES}" == 'true' ] && RPM_REPO_PATH=${RPM_PROPOSED_REPO_PATH}
[ "${IS_HOLDBACK}" == 'true' ] && RPM_REPO_PATH=${RPM_HOLDBACK_REPO_PATH}
[ "${IS_SECURITY}" == 'true' ] && RPM_REPO_PATH=${RPM_SECURITY_REPO_PATH}
[ "${IS_HOTFIX}" == 'true' ] && RPM_REPO_PATH=${RPM_HOTFIX_REPO_PATH}
local LOCAL_REPO_PATH=${REPO_BASE_PATH}/${RPM_REPO_PATH}
# Parse binary list
local BINRPMLIST=""
local BINSRCLIST=""
local BINSRCNAMES=""
local BINRPMNAMES=""
for binary in ${TMP_DIR}/* ; do
if [ "${binary:(-7)}" == "src.rpm" ] ; then
BINSRCLIST="${binary}"
BINSRCNAMES="${binary##*/}"
elif [ "${binary##*.}" == "rpm" ]; then
BINRPMLIST="${BINRPMLIST} ${binary}"
BINRPMNAMES="${BINRPMNAMES} ${binary##*/}"
fi
done
BINNAMES="${BINSRCNAMES} ${BINRPMNAMES}"
local PACKAGENAMES=""
# Get existing srpm filename
local SRPM_NAME=$(rpm -qp --queryformat "%{NAME}" ${BINSRCLIST})
local _repoid_source=$(mktemp -u XXXXXXXX)
local repoquery_opts="--repofrompath=${_repoid_source},file://${LOCAL_REPO_PATH}/Source/ --repoid=${_repoid_source}"
local EXIST_SRPM_FILE=$(repoquery ${repoquery_opts} --archlist=src --location ${SRPM_NAME})
local EXIST_SRPM_FILE=${EXIST_SRPM_FILE##*/}
# Get existing rpm files
local repoquerysrpm_py="$(dirname $(readlink -e $0))/repoquerysrpm.py"
local EXIST_RPM_FILES=$(python ${repoquerysrpm_py} --srpm=${EXIST_SRPM_FILE} --path=${LOCAL_REPO_PATH}/x86_64/ | awk -F'/' '{print $NF}')
# Cleanup `repoquery` data
find /var/tmp/yum-${USER}-* -type d -name $_repoid_source -exec rm -rf {} \; 2>/dev/null || :
job_lock ${LOCAL_REPO_PATH}.lock wait 3600
# Sign and publish binaries
for binary in ${BINRPMLIST} ${BINSRCLIST} ; do
local PACKAGEFOLDER=x86_64/Packages
[ "${binary:(-7)}" == "src.rpm" ] && PACKAGEFOLDER=Source/SPackages
# Get package info
local NEWBINDATA=$(rpm -qp --queryformat "%{EPOCH} %{NAME} %{VERSION} %{RELEASE} %{SHA1HEADER}\n" ${binary} 2>/dev/null)
local NEWBINEPOCH=$(echo ${NEWBINDATA} | cut -d' ' -f1)
[ "${NEWBINEPOCH}" == "(none)" ] && NEWBINEPOCH='0'
local BINNAME=$(echo ${NEWBINDATA} | cut -d' ' -f2)
[ "${binary:(-7)}" != "src.rpm" ] && local PACKAGENAMES="${PACKAGENAMES} ${BINNAME}"
local NEWBINVERSION=$(echo ${NEWBINDATA} | cut -d' ' -f3)
local NEWBINRELEASE=$(echo ${NEWBINDATA} | cut -d' ' -f4)
local NEWBINSHA=$(echo ${NEWBINDATA} | cut -d' ' -f5)
# EXISTBINDATA format pkg-name-epoch:version-release.arch (NEVRA)
local _repoid_os=$(mktemp -u XXXXXXXX)
local _repoid_updates=$(mktemp -u XXXXXXXX)
local _repoid_proposed=$(mktemp -u XXXXXXXX)
local _repoid_holdback=$(mktemp -u XXXXXXXX)
local _repoid_security=$(mktemp -u XXXXXXXX)
local _repoid_hotfix=$(mktemp -u XXXXXXXX)
local repoquery_cmd="repoquery --repofrompath=${_repoid_os},file://${REPO_BASE_PATH}/${RPM_OS_REPO_PATH}/${PACKAGEFOLDER%/*} --repoid=${_repoid_os}"
local repoquery_cmd="${repoquery_cmd} --repofrompath=${_repoid_updates},file://${REPO_BASE_PATH}/${RPM_UPDATES_REPO_PATH}/${PACKAGEFOLDER%/*} --repoid=${_repoid_updates}"
local repoquery_cmd="${repoquery_cmd} --repofrompath=${_repoid_holdback},file://${REPO_BASE_PATH}/${RPM_HOLDBACK_REPO_PATH}/${PACKAGEFOLDER%/*} --repoid=${_repoid_holdback}"
local repoquery_cmd="${repoquery_cmd} --repofrompath=${_repoid_security},file://${REPO_BASE_PATH}/${RPM_SECURITY_REPO_PATH}/${PACKAGEFOLDER%/*} --repoid=${_repoid_security}"
# Hotfixes must not be checked against the proposed repository
if [ "${IS_HOTFIX}" != 'true' ]; then
local repoquery_cmd="${repoquery_cmd} --repofrompath=${_repoid_proposed},file://${REPO_BASE_PATH}/${RPM_PROPOSED_REPO_PATH}/${PACKAGEFOLDER%/*} --repoid=${_repoid_proposed}"
else
local repoquery_cmd="${repoquery_cmd} --repofrompath=${_repoid_hotfix},file://${REPO_BASE_PATH}/${RPM_HOTFIX_REPO_PATH}/${PACKAGEFOLDER%/*} --repoid=${_repoid_hotfix}"
fi
[ "${binary:(-7)}" == "src.rpm" ] && repoquery_cmd="${repoquery_cmd} --archlist=src"
local EXISTBINDATA=$(${repoquery_cmd} ${BINNAME} 2>/dev/null)
# Cleanup `repoquery` data
for _repoid in $_repoid_os $_repoid_updates $_repoid_proposed $_repoid_holdback $_repoid_security $_repoid_hotfix ; do
find /var/tmp/yum-${USER}-* -type d -name $_repoid -exec rm -rf {} \; 2>/dev/null || :
done
# Get arch
local EXISTBINARCH=${EXISTBINDATA##*.}
# Strip the arch suffix
local EXISTBINDATA=${EXISTBINDATA%.*}
# Get epoch
local EXISTBINEPOCH=$(echo ${EXISTBINDATA} | cut -d':' -f1 | awk -F'-' '{print $NF}')
# Skip "pkg-name-epoch:"
local EXISTBINDATA=${EXISTBINDATA#*:}
# Get version
local EXISTBINVERSION=${EXISTBINDATA%%-*}
# Get release
local EXISTBINRELEASE=${EXISTBINDATA#*-}
## FixMe: Improve package removal
# Remove existing packages from repo (for new change requests and downgrades)
if [ "${GERRIT_CHANGE_STATUS}" == "NEW" -o "$IS_DOWNGRADE" == "true" ] ; then
find ${LOCAL_REPO_PATH} -name "${BINNAME}-${EXISTBINVERSION}-${EXISTBINRELEASE}.${EXISTBINARCH}*" \
-exec rm -f {} \;
unset EXISTBINVERSION
fi
# Compare versions of the new and existing packages
local SKIPPACKAGE=0
if [ ! -z "${EXISTBINVERSION}" ] ; then
############################################################
## Comparing versions before including the package in the repo
##
CMPVER=$(python <(cat <<-HERE
from rpmUtils import miscutils
print miscutils.compareEVR(("${EXISTBINEPOCH}", "${EXISTBINVERSION}", "${EXISTBINRELEASE}"),
("${NEWBINEPOCH}", "${NEWBINVERSION}", "${NEWBINRELEASE}"))
HERE
))
# Results:
# 1 - EXISTBIN is newer than NEWBIN
# 0 - EXISTBIN and NEWBIN have the same version
# -1 - EXISTBIN is older than NEWBIN
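# Illustration (assumed example values):
#   existing 0:1.2-1 vs new 0:1.1-1 ->  1  (refuse to publish)
#   existing 0:1.2-1 vs new 0:1.2-1 ->  0  (fall through to the checksum check)
#   existing 0:1.2-1 vs new 0:1.3-1 -> -1  (older package is replaced below)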
case ${CMPVER} in
1) error "Can't publish ${binary#*/}. Existing ${BINNAME}-${EXISTBINEPOCH}:${EXISTBINVERSION}-${EXISTBINRELEASE} has newer version" ;;
0) # Check sha for identical package names
EXISTRPMFILE=$(${repoquery_cmd} --location ${BINNAME})
EXISTBINSHA=$(rpm -qp --queryformat "%{SHA1HEADER}" ${EXISTRPMFILE})
if [ "${NEWBINSHA}" == "${EXISTBINSHA}" ]; then
SKIPPACKAGE=1
echo "Skipping including of ${binary}. Existing ${BINNAME}-${EXISTBINEPOCH}:${EXISTBINVERSION}-${EXISTBINRELEASE} has the same version and checksum"
else
error "Can't publish ${binary#*/}. Existing ${BINNAME}-${EXISTBINEPOCH}:${EXISTBINVERSION}-${EXISTBINRELEASE} has the same version but different checksum"
fi
;;
*) : ;;
esac
##
############################################################
fi
############
## Signing
##
if [ -n "${SIGKEYID}" ] ; then
# rpmsign requires pass phrase. use `expect` to skip it
if [ "${USE_SIGUL}" = "true" ] ; then
mv "${binary}" "${binary}_unsign"
retry -c4 -s1 _sigul "$KEY_PASSPHRASE" -u "$SIGUL_USER" sign-rpm -o "${binary}" "$SIGKEYID" "${binary}_unsign"
rm -f "${binary}_unsign"
else
LANG=C expect <<EOL
spawn rpmsign --define "%__gpg_check_password_cmd /bin/true" --define "%_signature gpg" --define "%_gpg_name ${SIGKEYID}" --resign ${binary}
expect -exact "Enter pass phrase:"
send -- "Doesn't matter\r"
expect eof
lassign [wait] pid spawnid os_error_flag value
puts "exit status: \$value"
exit \$value
EOL
fi
[ $? -ne 0 ] && error "Something went wrong. Can't sign package ${binary#*/}"
fi
##
###########
[ "${SKIPPACKAGE}" == "0" ] && cp ${binary} ${LOCAL_REPO_PATH}/${PACKAGEFOLDER}
done
# Remove old packages
for file in ${EXIST_SRPM_FILE} ${EXIST_RPM_FILES} ; do
[ "${BINNAMES}" == "${BINNAMES/$file/}" ] \
&& find ${LOCAL_REPO_PATH} -type f -name ${file} -exec rm {} \; 2>/dev/null
done
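# repomanage --keep=1 --old lists every package file except the newest version
# of each package, so the two commands below purge superseded binaries/sources.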
rm -f $(repomanage --keep=1 --old ${LOCAL_REPO_PATH}/x86_64)
rm -f $(repomanage --keep=1 --old ${LOCAL_REPO_PATH}/Source)
# Update and sign repository metadata
[ ! -e ${LOCAL_REPO_PATH}/comps.xml ] && wget ${DEFAULTCOMPSXML} -O ${LOCAL_REPO_PATH}/comps.xml
createrepo --pretty --database --update -g ${LOCAL_REPO_PATH}/comps.xml -o ${LOCAL_REPO_PATH}/x86_64/ ${LOCAL_REPO_PATH}/x86_64
createrepo --pretty --database --update -o ${LOCAL_REPO_PATH}/Source/ ${LOCAL_REPO_PATH}/Source
if [ -n "${SIGKEYID}" ] ; then
rm -f ${LOCAL_REPO_PATH}/x86_64/repodata/repomd.xml.asc
rm -f ${LOCAL_REPO_PATH}/Source/repodata/repomd.xml.asc
if [ "${USE_SIGUL}" = true ] ; then
for TYPE in x86_64 Source ; do
retry -c4 -s1 _sigul "$KEY_PASSPHRASE" -u "$SIGUL_USER" sign-data --armor -o "${LOCAL_REPO_PATH}/${TYPE}/repodata/repomd.asc" "${SIGKEYID}" "${LOCAL_REPO_PATH}/${TYPE}/repodata/repomd.xml"
done
else
gpg --armor --digest-algo SHA512 --local-user ${SIGKEYID} --detach-sign ${LOCAL_REPO_PATH}/x86_64/repodata/repomd.xml
gpg --armor --digest-algo SHA512 --local-user ${SIGKEYID} --detach-sign ${LOCAL_REPO_PATH}/Source/repodata/repomd.xml
fi
[ -f "RPM-GPG-KEY" ] && cp RPM-GPG-KEY ${LOCAL_REPO_PATH}/RPM-GPG-KEY-${PROJECT_NAME}${PROJECT_VERSION}
fi
# Publish yaml files
local yaml_file=$(find "$TMP_DIR" -name '*.yaml' 2>/dev/null)
if [ -n "$yaml_file" ] ; then
local yaml_dir=${LOCAL_REPO_PATH}/versioner
mkdir -p "$yaml_dir"
cp -v "$yaml_file" "$yaml_dir"
fi
# Sync repo to remote host
sync-repo ${LOCAL_REPO_PATH}/ ${RPM_REPO_PATH} ${REPO_REQUEST_PATH_PREFIX} ${REQUEST_NUM} ${LP_BUG}
job_lock ${LOCAL_REPO_PATH}.lock unset
rm -f ${WRK_DIR}/rpm.publish.setenvfile
cat > ${WRK_DIR}/rpm.publish.setenvfile <<-EOF
RPM_PUBLISH_SUCCEEDED=true
RPM_DISTRO=${DIST}
RPM_VERSION=${NEWBINEPOCH}:${NEWBINVERSION}-${NEWBINRELEASE}
RPM_REPO_URL=http://${REMOTE_REPO_HOST}/${URL_PREFIX}${RPM_REPO_PATH}/x86_64
RPM_BINARIES=$(echo ${PACKAGENAMES} | sed 's|^ ||; s| |,|g')
RPM_CHANGE_REVISION=${GERRIT_PATCHSET_REVISION}
LP_BUG=${LP_BUG}
EOF
}
main "$@"
exit 0
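Likewise, an assumed consumer-side sketch for the RPM tree synced above; the baseurl mirrors RPM_REPO_URL and the gpgkey path mirrors the RPM-GPG-KEY-* file the script copies, so treat every ${...} value as a placeholder:

# Illustrative only; write a yum repo stanza pointing at the published tree.
cat > /etc/yum.repos.d/perestroika.repo <<EOF
[perestroika]
name=Perestroika packages
baseurl=http://${REMOTE_REPO_HOST}/${URL_PREFIX}${RPM_REPO_PATH}/x86_64
gpgcheck=1
gpgkey=http://${REMOTE_REPO_HOST}/${URL_PREFIX}${RPM_REPO_PATH}/RPM-GPG-KEY-${PROJECT_NAME}${PROJECT_VERSION}
EOF
yum makecache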


@ -1,66 +0,0 @@
#!/usr/bin/python
from __future__ import print_function
import argparse
import gzip
import os
from lxml import etree as ET
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-s', '--srpm', dest='srpm', action='store', type=str,
        help='srpm', required=True, default='none'
    )
    parser.add_argument(
        '-p', '--path', dest='path', action='store', type=str,
        help='path', required=True, default='.'
    )
    params, other_params = parser.parse_known_args()
    # Locate the primary metadata file via repomd.xml
    repomdpath = os.path.join(params.path, 'repodata', 'repomd.xml')
    tree = ET.parse(repomdpath)
    repomd = tree.getroot()
    xmlpath = {}
    for data in repomd.findall(ET.QName(repomd.nsmap[None], 'data')):
        filetype = data.attrib['type']
        xmlpath[filetype] = data.find(
            ET.QName(repomd.nsmap[None], 'location')).attrib['href']
    primaryfile = os.path.join(params.path, xmlpath['primary'])
    with gzip.open(primaryfile, 'rb') as f:
        primary_content = f.read()
    primary = ET.fromstring(primary_content)
    # Find every package built from the requested source RPM
    filtered = primary.xpath('//rpm:sourcerpm[text()="' + params.srpm + '"]',
                             namespaces={'rpm': primary.nsmap['rpm']})
    for item in filtered:
        # <rpm:sourcerpm> sits under <package>/<format>, so its grandparent
        # is the <package> element this loop reports on.
        pkg = item.getparent().getparent()
        name = pkg.find(ET.QName(primary.nsmap[None], 'name')).text
        arch = pkg.find(ET.QName(primary.nsmap[None], 'arch')).text
        version = pkg.find(ET.QName(primary.nsmap[None], 'version'))
        epoch = version.attrib['epoch']
        ver = version.attrib['ver']
        rel = version.attrib['rel']
        location = pkg.find(
            ET.QName(primary.nsmap[None], 'location')).attrib['href']
        print('{name} {epoch} {ver} {rel} {arch} {location}'.format(
            name=name, epoch=epoch, ver=ver, rel=rel,
            arch=arch, location=location))


if __name__ == "__main__":
    main()
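A usage sketch with illustrative file names, matching the way the RPM publisher invokes this helper:

# Prints one line per binary built from the given SRPM:
#   <name> <epoch> <ver> <rel> <arch> <location>
python repoquerysrpm.py --srpm=example-pkg-1.0-1.src.rpm --path=/srv/repo/x86_64/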

12
tox.ini

@ -1,12 +0,0 @@
[tox]
minversion = 1.6
envlist = pep8
skipsdist = True
[testenv]
usedevelop = False
whitelist_externals = bash
commands =
[testenv:pep8]
commands = bash -c "exit 0"