Merge "Remove unused jenkins/scripts scripts"

This commit is contained in:
Zuul 2017-11-28 09:41:20 +00:00 committed by Gerrit Code Review
commit 7ab9e5175e
18 changed files with 0 additions and 1238 deletions

View File

@ -1,75 +0,0 @@
#!/bin/bash -xe
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Install all known OpenStackClient projects that have plugins. Then install
# the proposed change to see if there are any conflicting commands.

# install openstackclient plugins from source to catch conflicts earlier
# Usage: install_from_source <repo>
# Clones openstack/<repo> into a throwaway workspace via zuul-cloner and
# pip-installs it into the shared $venv virtualenv.
function install_from_source {
    repo=$1
    # throwaway clone workspace, removed once the install is done
    root=$(mktemp -d)
    $zc --cache-dir /opt/git --workspace ${root} \
        https://git.openstack.org openstack/${repo}
    (cd ${root}/openstack/${repo} && $venv/bin/pip install .)
    rm -rf $root
}

# zuul-cloner binary provided on the CI worker image
zc='/usr/zuul-env/bin/zuul-cloner'

# setup a virtual environment to install all the plugins
venv_name='osc_plugins'
# remove the virtualenv on exit, success or failure
trap "rm -rf $venv_name" EXIT
virtualenv $venv_name
venv=$(pwd)/$venv_name

# install known OpenStackClient plugins
install_from_source python-openstackclient
install_from_source python-barbicanclient
install_from_source python-cloudkittyclient
install_from_source python-congressclient
install_from_source python-designateclient
install_from_source python-heatclient
install_from_source python-ironicclient
install_from_source python-ironic-inspector-client
install_from_source python-karborclient
install_from_source python-mistralclient
install_from_source python-muranoclient
install_from_source python-neutronclient
install_from_source python-octaviaclient
install_from_source python-pankoclient
install_from_source python-rsdclient
install_from_source python-saharaclient
install_from_source python-searchlightclient
install_from_source python-senlinclient
install_from_source python-tripleoclient
install_from_source python-troveclient
install_from_source python-vitrageclient
install_from_source python-watcherclient
install_from_source python-zaqarclient

echo "Begin freeze output from $venv virtualenv:"
echo "======================================================================"
$venv/bin/pip freeze
echo "======================================================================"

# now check the current proposed change doesn't cause a conflict
# we should already be in the project's root directory where setup.py exists
echo "Installing the proposed change in directory: $(pwd)"
$venv/bin/pip install -e .

echo "Testing development version of openstack client, version:"
$venv/bin/openstack --version

# run the python script to actually check the commands now that we're setup
$venv/bin/python /usr/local/jenkins/slave_scripts/check_osc_commands.py

View File

@ -1,38 +0,0 @@
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import requests
import requestsexceptions
import yaml
def main():
    """Probe every asset URL in assets.yaml and record the dead ones.

    Reads ``openstack_catalog/web/static/assets.yaml``, issues a HEAD
    request (following redirects) for each asset that declares a URL, and
    writes every asset that does not answer 200 into
    ``assets_dead.yaml`` as ``{'active': False}``.
    """
    requestsexceptions.squelch_warnings()
    # safe_load: the assets file is plain data, and bare yaml.load() is
    # deprecated/unsafe (it can instantiate arbitrary python objects).
    # `with` also closes the handle the original left open.
    with open('openstack_catalog/web/static/assets.yaml') as src:
        data = yaml.safe_load(src)
    assets = {}
    for a in data['assets']:
        url = a.get('attributes', {}).get('url')
        if url:
            # HEAD keeps the probe cheap; follow redirects so moved
            # assets still count as alive.
            r = requests.head(url, allow_redirects=True)
            if r.status_code != 200:
                assets[a['name']] = {'active': False}
    with open('openstack_catalog/web/static/assets_dead.yaml', 'w') as out:
        out.write(yaml.safe_dump({"assets": assets}))


if __name__ == '__main__':
    main()

View File

@ -1,181 +0,0 @@
#! /usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This module will use `pkg_resources` to scan commands for all OpenStackClient
plugins with the purpose of detecting duplicate commands.
"""
import pkg_resources
import traceback
def find_duplicates():
    """Find duplicates commands.

    Here we use `pkg_resources` to find all modules. There will be many
    modules on a system, so we filter them out based on "openstack" since
    that is the prefix that OpenStackClient plugins will have.

    Each module has various entry points, each OpenStackClient command will
    have an entrypoint. Each entry point has a short name (ep.name) which
    is the command the user types, as well as a long name (ep.module_name)
    which indicates from which module the entry point is from.

    For example, the entry point and module for v3 user list is::

        module => openstackclient.identity.v3
        ep.name => user_list
        ep.module_name => openstackclient.identity.v3.user

    We keep a running tally of valid commands, duplicate commands and
    commands that failed to load.

    The resultant data structure for valid commands should look like::

        {'user_list':
            ['openstackclient.identity.v3.user',
             'openstackclient.identity.v2.0.user'],
         'flavor_list':
            ['openstackclient.compute.v2.flavor']
        }

    The same can be said for the duplicate and failed commands.

    :returns: True when duplicate or load-failed commands were found
        (the caller should fail the check), False otherwise.
    """
    valid_cmds = {}
    duplicate_cmds = {}
    failed_cmds = {}

    # find all modules on the system
    modules = set()
    for dist in pkg_resources.working_set:
        entry_map = pkg_resources.get_entry_map(dist)
        modules.update(set(entry_map.keys()))
    for module in modules:
        # OpenStackClient plugins are prefixed with "openstack", skip otherwise
        if not module.startswith('openstack'):
            continue
        # Iterate over all entry points
        for ep in pkg_resources.iter_entry_points(module):
            # Check for a colon, since valid entrypoints will have one, for
            # example: quota_show = openstackclient.common.quota:ShowQuota
            # and plugin entrypoints will not, for
            # example: orchestration = heatclient.osc.plugin
            if ':' not in str(ep):
                continue
            # cliff does a mapping between spaces and underscores
            ep_name = ep.name.replace(' ', '_')
            try:
                ep.load()
            except Exception:
                exc_string = traceback.format_exc()
                message = "{}\n{}".format(ep.module_name, exc_string)
                failed_cmds.setdefault(ep_name, []).append(message)
                # NOTE(review): no `continue` here, so an entry point that
                # failed to load is still tallied below as valid/duplicate;
                # the failed_cmds check later still fails the run, so the
                # overall outcome is unchanged.
            if _is_valid_command(ep_name, ep.module_name, valid_cmds):
                valid_cmds.setdefault(ep_name, []).append(ep.module_name)
            else:
                duplicate_cmds.setdefault(ep_name, []).append(ep.module_name)
    if duplicate_cmds:
        print("Duplicate commands found...")
        print(duplicate_cmds)
        return True
    if failed_cmds:
        print("Some commands failed to load...")
        print(failed_cmds)
        return True
    overlap_cmds = _check_command_overlap(valid_cmds)
    if overlap_cmds:
        print("WARNING: Some commands overlap...")
        print(overlap_cmds)
        # FIXME(stevemar): when we determine why commands are overlapping
        # we can uncomment the line below.
        # return True
    # Safely return False here with the full set of commands
    print("Final set of commands...")
    print(valid_cmds)
    print("Found no duplicate or overlapping commands, OK to merge!")
    return False
def _check_command_overlap(valid_cmds):
"""Determine if the entry point overlaps with another command.
For example, if one plugin creates the command "object1 action",
and another plugin creates the command "object1 action object2",
the object2 command is unreachable since it overlaps the
namespace.
"""
overlap_cmds = {}
for ep_name, ep_mods in valid_cmds.iteritems():
# Skip openstack.cli.base client entry points
for ep_mod in ep_mods:
for ep_name_search in valid_cmds.keys():
if ep_name_search.startswith(ep_name + "_"):
overlap_cmds.setdefault(ep_name, []).append(ep_name_search)
return overlap_cmds
def _is_valid_command(ep_name, ep_module_name, valid_cmds):
"""Determine if the entry point is valid.
Aside from a simple check to see if the entry point short name is in our
tally, we also need to check for allowed duplicates. For instance, in the
case of supporting multiple versions, then we want to allow for duplicate
commands. Both the identity v2 and v3 APIs support `user_list`, so these
are fine.
In order to determine if an entry point is a true duplicate we can check to
see if the module name roughly matches the module name of the entry point
that was initially added to the set of valid commands.
The following should trigger a match::
openstackclient.identity.v3.user and openstackclient.identity.v*.user
Whereas, the following should fail::
openstackclient.identity.v3.user and openstackclient.baremetal.v3.user
"""
if ep_name not in valid_cmds:
return True
else:
# there already exists an entry in the dictionary for the command...
module_parts = ep_module_name.split(".")
for valid_module_name in valid_cmds[ep_name]:
valid_module_parts = valid_module_name.split(".")
if (module_parts[0] == valid_module_parts[0] and
module_parts[1] == valid_module_parts[1] and
module_parts[3] == valid_module_parts[3]):
return True
return False
if __name__ == '__main__':
    # Exit non-zero when duplicates or load failures were detected, so the
    # CI job fails the proposed change.
    print("Checking 'openstack' plug-ins")
    exit(1 if find_duplicates() else 0)

View File

@ -1,7 +0,0 @@
#!/bin/bash -x
# Roll the machine back to a pristine root snapshot and reboot into it
# with kexec (no firmware reboot needed).

# Drop the previous "last_root" snapshot, keep the current root around as
# last_root, and cut a fresh 20G snapshot named "root" from the original
# image.
lvremove -f /dev/main/last_root
lvrename /dev/main/root last_root
lvcreate -L20G -s -n root /dev/main/orig_root

# Re-use the currently running kernel's command line for the staged kernel.
APPEND="$(cat /proc/cmdline)"
kexec -l /vmlinuz --initrd=/initrd.img --append="$APPEND"

# Detach before executing the loaded kernel so this script (and whatever
# invoked it) can return; `kexec -e` replaces the running system.
nohup bash -c "sleep 2; kexec -e" </dev/null >/dev/null 2>&1 &

View File

@ -1,44 +0,0 @@
#!/bin/bash -x
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Upload java packages to maven repositories
#
# $1: project name, $2: version (unused, see note below),
# $3: pom output filename, $4: plugin jar to upload

PROJECT=$1
VERSION=$2
META_DATA_FILE=$3
PLUGIN_FILE=$4

# Strip project name and extension leaving only the version.
# NOTE(review): this overwrites the VERSION passed in as $2 above, so the
# second argument is effectively ignored.
VERSION=$(echo ${PLUGIN_FILE} | sed -n "s/${PROJECT}-\(.*\).jar/\1/p")

# generate pom file with version info
POM_IN_ZIP=$(unzip -Z -1 ${PLUGIN_FILE}|grep pom.xml)
unzip -o -j ${PLUGIN_FILE} ${POM_IN_ZIP}
# Substitute the ${{project-version}} placeholder in the packaged pom.xml.
sed "s/\${{project-version}}/${VERSION}/g" <pom.xml >${META_DATA_FILE}

# deploy plugin artifacts from workspace to maven central repository
MAVEN_REPO="https://oss.sonatype.org/content/groups/public/maven"
# curl --config file holding the credentials, kept out of the -x trace
MAVEN_REPO_CREDS="/home/jenkins/.mavencentral-curl"
curl -X PUT \
    --config ${MAVEN_REPO_CREDS} \
    --data-binary @${META_DATA_FILE} \
    -i "${MAVEN_REPO}/${PROJECT}/${VERSION}/${META_DATA_FILE}" > /dev/null 2>&1
curl -X PUT \
    --config ${MAVEN_REPO_CREDS} \
    --data-binary @${PLUGIN_FILE} \
    -i "${MAVEN_REPO}/${PROJECT}/${VERSION}/${PLUGIN_FILE}" > /dev/null 2>&1

View File

@ -1,22 +0,0 @@
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Block until the host given as argv[1] answers a ping, then exit 0."""

import sys
from subprocess import Popen, PIPE


def main():
    # universal_newlines=True makes stdout yield str; the original compared
    # the str 'bytes from' against raw bytes, which raises TypeError on
    # Python 3 (and could never match).
    p = Popen(["ping", sys.argv[1]], stdout=PIPE, universal_newlines=True)
    while True:
        line = p.stdout.readline().strip()
        if not line and p.poll() is not None:
            # ping exited without ever reporting a reply; fail instead of
            # spinning forever on EOF.
            sys.exit(1)
        if 'bytes from' in line:
            # First reply seen -- the host is reachable.
            p.terminate()
            sys.exit(0)


if __name__ == '__main__':
    main()

View File

@ -1,269 +0,0 @@
#! /usr/bin/env python
# Copyright (C) 2011 OpenStack, LLC.
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import collections
import contextlib
import os
import shlex
import shutil
import subprocess
import sys
import tempfile
requirement = None
project = None
def run_command(cmd):
    """Run *cmd* (a shell-style string) and return stripped (stdout, stderr).

    The command is echoed for the CI log and split with shlex before
    execution. Raises SystemError carrying stderr when the process exits
    non-zero.
    """
    print(cmd)
    argv = shlex.split(str(cmd))
    proc = subprocess.Popen(argv,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if proc.returncode:
        raise SystemError(err)
    return (out.strip(), err.strip())
class RequirementsList(object):
    """Parsed requirements files (and extras) of one project checkout."""

    def __init__(self, name, project):
        # name: label used in log output (e.g. 'HEAD' or a branch name)
        self.name = name
        # reqs_by_file: filename -> {package name -> set of requirements}
        self.reqs_by_file = {}
        # project: dict describing the checkout; presumably produced by
        # openstack_requirements.project.read() -- TODO confirm with caller
        self.project = project
        # failed: set True when a duplicate-entry violation is found
        self.failed = False

    @property
    def reqs(self):
        # Flattened view over all files: package name -> requirement set.
        # NOTE(review): a package listed in several files keeps only one
        # entry -- later files win in this comprehension.
        return {k: v for d in self.reqs_by_file.values()
                for k, v in d.items()}

    def extract_reqs(self, content, strict):
        """Parse one requirements file's text into {name -> set(reqs)}.

        When *strict*, duplicate entries for the same package (ignoring
        comments) mark the list as failed.
        """
        reqs = collections.defaultdict(set)
        parsed = requirement.parse(content)
        for name, entries in parsed.items():
            if not name:
                # Comments and other unprocessed lines
                continue
            list_reqs = [r for (r, line) in entries]
            # Strip the comments out before checking if there are duplicates
            list_reqs_stripped = [r._replace(comment='') for r in list_reqs]
            if strict and len(list_reqs_stripped) != len(set(
                    list_reqs_stripped)):
                print("Requirements file has duplicate entries "
                      "for package %s : %r." % (name, list_reqs))
                self.failed = True
            reqs[name].update(list_reqs)
        return reqs

    def process(self, strict=True):
        """Convert the project into ready to use data.

        - an iterable of requirement sets to check
        - each set has the following rules:
          - each has a list of Requirements objects
          - duplicates are not permitted within that list
        """
        print("Checking %(name)s" % {'name': self.name})
        # First, parse.
        for fname, content in self.project.get('requirements', {}).items():
            print("Processing %(fname)s" % {'fname': fname})
            if strict and not content.endswith('\n'):
                # NOTE(review): this warns but does not set self.failed.
                print("Requirements file %s does not "
                      "end with a newline." % fname)
            self.reqs_by_file[fname] = self.extract_reqs(content, strict)
        # NOTE(review): `project` below is the module loaded globally by
        # install_and_load_requirements(), not the self.project dict.
        for name, content in project.extras(self.project).items():
            print("Processing .[%(extra)s]" % {'extra': name})
            self.reqs_by_file[name] = self.extract_reqs(content, strict)
def grab_args():
    """Parse and return the command-line options for this check."""
    ap = argparse.ArgumentParser(
        description="Check if project requirements have changed"
    )
    ap.add_argument('--local', action='store_true',
                    help='check local changes (not yet in git)')
    ap.add_argument('branch', nargs='?', default='master',
                    help='target branch for diffs')
    ap.add_argument('--zc', help='what zuul cloner to call')
    ap.add_argument('--reqs', help='use a specified requirements tree')
    return ap.parse_args()
@contextlib.contextmanager
def tempdir():
    """Yield a fresh temporary directory and remove it on exit.

    The directory is created *before* entering the try block: in the
    original, a failing mkdtemp() left `reqroot` unbound and the finally
    clause raised a confusing NameError instead of the real error.
    """
    reqroot = tempfile.mkdtemp()
    try:
        yield reqroot
    finally:
        shutil.rmtree(reqroot)
def install_and_load_requirements(reqroot, reqdir):
    """Install openstack/requirements from *reqdir* into a scratch venv and
    import its `project` and `requirement` modules into module globals.

    :param reqroot: scratch directory that will hold the virtualenv.
    :param reqdir: path to an openstack/requirements checkout.
    """
    # Log the exact sha being used so the job output is reproducible.
    sha = run_command("git --git-dir %s/.git rev-parse HEAD" % reqdir)[0]
    print("requirements git sha: %s" % sha)
    req_venv = os.path.join(reqroot, 'venv')
    req_pip = os.path.join(req_venv, 'bin/pip')
    # NOTE(review): hard-codes the python2.7 site-packages layout; only
    # works when virtualenv produces a 2.7 environment.
    req_lib = os.path.join(req_venv, 'lib/python2.7/site-packages')
    out, err = run_command("virtualenv " + req_venv)
    out, err = run_command(req_pip + " install " + reqdir)
    # Make the freshly installed package importable from this process.
    sys.path.append(req_lib)
    global project
    global requirement
    from openstack_requirements import project  # noqa
    from openstack_requirements import requirement  # noqa
def _is_requirement_in_global_reqs(req, global_reqs):
# Compare all fields except the extras field as the global
# requirements should not have any lines with the extras syntax
# example: oslo.db[xyz]<1.2.3
for req2 in global_reqs:
if (req.package == req2.package and
req.location == req2.location and
req.specifiers == req2.specifiers and
req.markers == req2.markers and
req.comment == req2.comment):
return True
return False
def main():
    """Compare this change's requirements against openstack/requirements.

    Clones (or reuses, with --reqs) the global requirements list, parses
    the proposed change's requirements and those of the target branch,
    and exits 1 when any changed entry conflicts with the global list.
    """
    args = grab_args()
    branch = args.branch
    failed = False

    # build a list of requirements from the global list in the
    # openstack/requirements project so we can match them to the changes
    with tempdir() as reqroot:
        # Only clone requirements repo if no local repo is specified
        # on the command line.
        if args.reqs is None:
            reqdir = os.path.join(reqroot, "openstack/requirements")
            if args.zc is not None:
                zc = args.zc
            else:
                zc = '/usr/zuul-env/bin/zuul-cloner'
            out, err = run_command("%(zc)s "
                                   "--cache-dir /opt/git "
                                   "--workspace %(root)s "
                                   "https://git.openstack.org "
                                   "openstack/requirements"
                                   % dict(zc=zc, root=reqroot))
            print(out)
            print(err)
        else:
            reqdir = args.reqs

        install_and_load_requirements(reqroot, reqdir)
        global_reqs = requirement.parse(
            open(reqdir + '/global-requirements.txt', 'rt').read())
        for k, entries in global_reqs.items():
            # Discard the lines: we don't need them.
            global_reqs[k] = set(r for (r, line) in entries)
        blacklist = requirement.parse(
            open(reqdir + '/blacklist.txt', 'rt').read())
        cwd = os.getcwd()

        # build a list of requirements in the proposed change,
        # and check them for style violations while doing so
        head = run_command("git rev-parse HEAD")[0]
        head_proj = project.read(cwd)
        head_reqs = RequirementsList('HEAD', head_proj)
        # Don't apply strict parsing rules to stable branches.
        # Reasoning is:
        # - devstack etc protect us from functional issues
        # - we're backporting to stable, so guarding against
        #   aesthetics and DRY concerns is not our business anymore
        # - if in future we have other not-functional linty style
        #   things to add, we don't want them to affect stable
        #   either.
        head_strict = not branch.startswith('stable/')
        head_reqs.process(strict=head_strict)

        if not args.local:
            # build a list of requirements already in the target branch,
            # so that we can create a diff and identify what's being changed
            run_command("git remote update")
            run_command("git checkout remotes/origin/%s" % branch)
            branch_proj = project.read(cwd)

            # switch back to the proposed change now
            run_command("git checkout %s" % head)
        else:
            branch_proj = {'root': cwd}
        branch_reqs = RequirementsList(branch, branch_proj)
        # Don't error on the target branch being broken.
        branch_reqs.process(strict=False)

        # iterate through the changing entries and see if they match the
        # global equivalents we want enforced
        for fname, freqs in head_reqs.reqs_by_file.items():
            print("Validating %(fname)s" % {'fname': fname})
            for name, reqs in freqs.items():
                counts = {}
                if (name in branch_reqs.reqs and
                        reqs == branch_reqs.reqs[name]):
                    # Unchanged [or a change that preserves a current value]
                    continue
                if name in blacklist:
                    # Blacklisted items are not synced and are managed
                    # by project teams as they see fit, so no further
                    # testing is needed.
                    continue
                if name not in global_reqs:
                    failed = True
                    print("Requirement %s not in openstack/requirements" %
                          str(reqs))
                    continue
                if reqs == global_reqs[name]:
                    continue
                # Count entries per extra so multi-line (markered)
                # requirements can be compared against the global list
                # line-count below.
                for req in reqs:
                    if req.extras:
                        for extra in req.extras:
                            counts[extra] = counts.get(extra, 0) + 1
                    else:
                        counts[''] = counts.get('', 0) + 1
                    if not _is_requirement_in_global_reqs(
                            req, global_reqs[name]):
                        failed = True
                        print("Requirement for package %s : %s does "
                              "not match openstack/requirements value : %s" % (
                                  name, str(req), str(global_reqs[name])))
                for extra, count in counts.items():
                    if count != len(global_reqs[name]):
                        failed = True
                        print("Package %s%s requirement does not match "
                              "number of lines (%d) in "
                              "openstack/requirements" % (
                                  name,
                                  ('[%s]' % extra) if extra else '',
                                  len(global_reqs[name])))

        # report the results
        if failed or head_reqs.failed or branch_reqs.failed:
            print("*** Incompatible requirement found!")
            print("*** See http://docs.openstack.org/developer/requirements")
            sys.exit(1)
        print("Updated requirements match openstack/requirements.")


if __name__ == '__main__':
    main()

View File

@ -1,36 +0,0 @@
#!/bin/bash -xe
#
# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Retrieve a python sdist and upload it to pypi with Curl.
#
# $1: project name, $2: host serving the release tarballs

PROJECT=$1
TARBALL_SITE=$2
# Zuul hands us something like refs/tags/X.Y.Z; keep only the tag.
TAG=$(echo $ZUUL_REF | sed 's/^refs.tags.//')

# Look in the setup.cfg to determine if a package name is specified, but
# fall back on the project name if necessary
DISTNAME=$(/usr/local/jenkins/slave_scripts/pypi-extract-name.py --tarball \
    || echo $PROJECT)
FILENAME="$DISTNAME-$TAG.tar.gz"

# Clean out stale tarballs so only the freshly fetched one is uploaded.
rm -rf *tar.gz
curl --fail -o $FILENAME https://$TARBALL_SITE/$PROJECT/$FILENAME

# Make sure we actually got a gzipped file
file -b $FILENAME | grep gzip

twine upload -r pypi $FILENAME

View File

@ -1,38 +0,0 @@
#!/bin/bash -xe
#
# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Retrieve supported python wheels and upload them to pypi with Curl.
#
# $1: project name, $2: host serving the release artifacts

PROJECT=$1
TARBALL_SITE=$2
# Zuul hands us something like refs/tags/X.Y.Z; keep only the tag.
TAG=$(echo $ZUUL_REF | sed 's/^refs.tags.//')

# Look in the setup.cfg to determine if a package name is specified, but
# fall back on the project name if necessary
DISTNAME=$(/usr/local/jenkins/slave_scripts/pypi-extract-name.py --wheel \
    || echo $PROJECT)
# Look in the setup.cfg to see if this is a universal wheel or not
WHEELTYPE=$(/usr/local/jenkins/slave_scripts/pypi-extract-universal.py)
FILENAME="$DISTNAME-$TAG-$WHEELTYPE-none-any.whl"

# Clean out stale wheels so only the freshly fetched one is uploaded.
rm -rf *.whl
curl --fail -o $FILENAME https://$TARBALL_SITE/$PROJECT/$FILENAME

# Make sure we actually got a wheel
file -b $FILENAME | grep -i zip

twine upload -r pypi $FILENAME

View File

@ -1,26 +0,0 @@
#!/bin/bash -xe
# Run coverage via tox. Also, run pbr freeze on the
# resulting environment at the end so that we have a record of exactly
# what packages we ended up testing.

export NOSE_COVER_HTML=1
export UPPER_CONSTRAINTS_FILE=$(pwd)/upper-constraints.txt
venv=${1:-cover}

# Workaround the combo of tox running setup.py outside of virtualenv
# and RHEL having an old distribute. The next line can be removed
# when either get fixed.
python setup.py --version

# This script runs under `set -e`, so the original `result=$?` after a
# failing tox was unreachable (the script aborted first and skipped the
# freeze output). Capture the status with `||` so we always reach the
# freeze block and still exit with tox's real status.
result=0
tox -e$venv || result=$?

[ -e .tox/$venv/bin/pbr ] && freezecmd=pbr || freezecmd=pip

echo "Begin $freezecmd freeze output from test virtualenv:"
echo "======================================================================"
.tox/${venv}/bin/${freezecmd} freeze
echo "======================================================================"

exit $result

View File

@ -1,94 +0,0 @@
#!/bin/bash -xe
# Build the project's sphinx docs via tox and arrange the result under
# doc/build/html according to the ref being built (master, tag, stable
# branch, or other branch). Also, run pbr freeze on the resulting
# environment at the end so that we have a record of exactly what
# packages we ended up testing.
#
venv=${1:-venv}
# BUG FIX: the original `${2:both}` is *substring* expansion ("both"
# evaluates to offset 0), so the default was never applied when $2 was
# unset. `${2:-both}` is the intended default-value expansion.
tags_handling=${2:-both}

mkdir -p doc/build
export UPPER_CONSTRAINTS_FILE=$(pwd)/upper-constraints.txt

# The "python setup.py build_sphinx" is intentionally executed instead of
# "tox -edocs", because it's the standard python project build interface
# specified in OpenStack Project Testing Interface:
# http://governance.openstack.org/reference/project-testing-interface.html
#
# Under `set -e` a plain `result=$?` after a failing command never runs;
# capture the status with `||` so the publishing steps below still
# execute and the real status is propagated at exit.
result=0
tox -e$venv -- python setup.py build_sphinx || result=$?

# If the build has not already failed and whereto is installed then
# test the redirects defined in the project.
if [ $result -eq 0 ]; then
    if [ -e .tox/$venv/bin/whereto ]; then
        tox -e $venv -- whereto doc/source/_extra/.htaccess doc/test/redirect-tests.txt || result=$?
    fi
fi

[ -e .tox/$venv/bin/pbr ] && freezecmd=pbr || freezecmd=pip
echo "Begin $freezecmd freeze output from test virtualenv:"
echo "======================================================================"
.tox/${venv}/bin/${freezecmd} freeze
echo "======================================================================"

# Leave a marker so the publisher can identify what produced this tree.
MARKER_TEXT="Project: $ZUUL_PROJECT Ref: $ZUUL_REFNAME Build: $ZUUL_UUID Revision: $ZUUL_NEWREV"
echo $MARKER_TEXT > doc/build/html/.root-marker

if [ -z "$ZUUL_REFNAME" ] || [ "$ZUUL_REFNAME" == "master" ] ; then
    : # Leave the docs where they are.
elif echo $ZUUL_REFNAME | grep refs/tags/ >/dev/null ; then
    # Put tagged releases in proper location. All tagged builds get copied to
    # BUILD_DIR/tagname. If this is the latest tagged release the copy of files
    # at BUILD_DIR remains. When Jenkins copies this file the root developer
    # docs are always the latest release with older tags available under the
    # root in the tagname dir.
    TAG=$(echo $ZUUL_REFNAME | sed 's/refs.tags.//')
    if [ ! -z $TAG ] ; then
        # This is a hack to ignore the year.release tags in projects since
        # now all projects use semver based versions instead of date based
        # versions. The date versions will sort higher even though they
        # should not so we just special case it here.
        LATEST=$(git tag | sed -n -e '/^20[0-9]\{2\}\..*$/d' -e '/^[0-9]\+\(\.[0-9]\+\)*$/p' | sort -V | tail -1)
        # Now publish to / and /$TAG if this is the latest version for projects
        # and we are only publishing from the release pipeline,
        # or just /$TAG otherwise.
        if [ "$tags_handling" = "tags-only" -a "$TAG" = "$LATEST" ] ; then
            # Copy the docs into a subdir if this is a tagged build
            mkdir doc/build/$TAG
            cp -R doc/build/html/. doc/build/$TAG
            mv doc/build/$TAG doc/build/html/$TAG
        else
            # Move the docs into a subdir if this is a tagged build
            mv doc/build/html doc/build/tmp
            mkdir doc/build/html
            mv doc/build/tmp doc/build/html/$TAG
        fi
    fi
elif echo $ZUUL_REFNAME | grep stable/ >/dev/null ; then
    # Put stable release changes in dir named after stable release under the
    # build dir. When Jenkins copies these files they will be accessible under
    # the developer docs root using the stable release's name.
    BRANCH=$(echo $ZUUL_REFNAME | sed 's/stable.//')
    if [ ! -z $BRANCH ] ; then
        # Move the docs into a subdir if this is a stable branch build
        mv doc/build/html doc/build/tmp
        mkdir doc/build/html
        mv doc/build/tmp doc/build/html/$BRANCH
    fi
else
    # Put other branch changes in dir named after branch under the
    # build dir. When Jenkins copies these files they will be
    # accessible under the developer docs root using the branch name.
    # EG: feature/foo or milestone-proposed
    BRANCH=$ZUUL_REFNAME
    TOP=`dirname $BRANCH`
    mv doc/build/html doc/build/tmp
    mkdir -p doc/build/html/$TOP
    mv doc/build/tmp doc/build/html/$BRANCH
fi

exit $result

View File

@ -1,43 +0,0 @@
#!/bin/bash -x
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Run the project's style env via tox, but first sanity-check that the
# project can still build an sdist tarball at all.
venv=${1:-pep8}
export UPPER_CONSTRAINTS_FILE=$(pwd)/upper-constraints.txt

# Sanity check: build an sdist in a throwaway virtualenv.
virtualenv sdist_check && sdist_check/bin/pip install pbr && sdist_check/bin/python setup.py sdist
sdistrc=$?
rm -rf sdist_check

tox -v -e$venv
rc=$?

[ -e .tox/$venv/bin/pbr ] && freezecmd=pbr || freezecmd=pip
echo "Begin $freezecmd freeze output from test virtualenv:"
echo "======================================================================"
.tox/${venv}/bin/${freezecmd} freeze
echo "======================================================================"

# BUG FIX: the original `[ ! $sdistrc ]` tests string emptiness, so a
# non-zero exit status ("1") still counted as success and the sdist
# failure was never reported. Compare the numeric status instead.
if [ "$sdistrc" -ne 0 ] ; then
    echo "******************************************************************"
    echo "Project cannot create sdist tarball!!!"
    echo "To reproduce this locally, run: 'python setup.py sdist'"
    echo "******************************************************************"
    exit $sdistrc
fi
exit $rc

View File

@ -1,23 +0,0 @@
#!/bin/bash -xe
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Run the project's pylint tox environment, teeing its output to
# pylint.txt for archiving.
venv=${1:-pylint}
export UPPER_CONSTRAINTS_FILE=$(pwd)/upper-constraints.txt

# pipefail makes the tee'd pipeline report tox's exit status, so the -e
# shebang still aborts the job when pylint fails.
set -o pipefail
tox -v -e$venv | tee pylint.txt
set +o pipefail

View File

@ -1,31 +0,0 @@
#!/bin/bash -xe
# If a bundle file is present, call tox with the jenkins version of
# the test environment so it is used. Otherwise, use the normal
# (non-bundle) test environment. Also, run pbr freeze on the
# resulting environment at the end so that we have a record of exactly
# what packages we ended up testing.
#
venv=venv

# Virtual X display for the selenium-driven browser.
VDISPLAY=99
DIMENSIONS='1280x1024x24'
/usr/bin/Xvfb :${VDISPLAY} -screen 0 ${DIMENSIONS} 2>&1 > /dev/null &

# Disable -e around the test run so Xvfb is always reaped and the freeze
# output is always printed; the real status is carried in $result.
set +e
DISPLAY=:${VDISPLAY} NOSE_WITH_XUNIT=1 tox -e$venv -- \
    /bin/bash run_tests.sh -N --only-selenium
result=$?
pkill Xvfb 2>&1 > /dev/null
set -e

[ -e .tox/$venv/bin/pbr ] && freezecmd=pbr || freezecmd=pip

echo "Begin $freezecmd freeze output from test virtualenv:"
echo "======================================================================"
.tox/${venv}/bin/${freezecmd} freeze
echo "======================================================================"

exit $result

View File

@ -1,33 +0,0 @@
#!/bin/bash -xe
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Build an sdist tarball via tox and write .sha1/.md5 checksum files
# next to it for the publisher.
venv=${1:-venv}
export UPPER_CONSTRAINTS_FILE=$(pwd)/upper-constraints.txt

# Remove stale tarballs so the glob below only matches this build's output.
rm -f dist/*.tar.gz
tox -e$venv python setup.py sdist

FILES=dist/*.tar.gz
for f in $FILES; do
    echo "SHA1sum for $f:"
    sha1sum $f | awk '{print $1}' > $f.sha1
    cat $f.sha1
    echo "MD5sum for $f:"
    md5sum $f | awk '{print $1}' > $f.md5
    cat $f.md5
done

View File

@ -1,32 +0,0 @@
#!/bin/bash -xe
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Build a wheel with tox and write SHA1/MD5 checksum files next to it.
# $1 optionally names the tox environment (default "venv").
venv=${1:-venv}
export UPPER_CONSTRAINTS_FILE=$(pwd)/upper-constraints.txt
# Remove stale wheels so the dist/*.whl glob matches only the fresh build.
rm -f dist/*.whl
tox -e$venv pip install wheel
tox -e$venv python setup.py bdist_wheel
for wheel in dist/*.whl; do
    echo -n "SHA1sum for $wheel: "
    sha1sum "$wheel" | awk '{print $1}' | tee "$wheel.sha1"
    echo -n "MD5sum for $wheel: "
    md5sum "$wheel" | awk '{print $1}' | tee "$wheel.md5"
done

View File

@ -1,46 +0,0 @@
#!/bin/bash -xe
#
# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright 2013, 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Retrieve python tarballs/wheels and make detached OpenPGP signatures.
#
# Arguments:
#   $1 -- project name (used in artifact URLs and as a name fallback)
#   $2 -- hostname of the site serving the release artifacts
PROJECT=$1
TARBALL_SITE=$2
# Strip the "refs/tags/" prefix from the Zuul ref to get the bare tag.
# NOTE(review): the sed pattern uses "." (any character) instead of
# escaped slashes, so it matches more loosely than strictly intended.
TAG=$(echo $ZUUL_REF | sed 's/^refs.tags.//')
# Look in the setup.cfg to determine if a package name is specified, but
# fall back on the project name if necessary. Also look in the setup.cfg
# to see if this is a universal wheel or not
DISTNAME=$(/usr/local/jenkins/slave_scripts/pypi-extract-name.py --wheel \
    || echo $PROJECT)
TARBALL=$(/usr/local/jenkins/slave_scripts/pypi-extract-name.py \
    --tarball || echo $PROJECT)-${TAG}.tar.gz
# The wheel name embeds the python tag supplied by pypi-extract-universal.py;
# "|| true" keeps the substitution (and the -xe script) alive if that
# helper fails.
WHEEL=$(/usr/local/jenkins/slave_scripts/pypi-extract-name.py \
    --wheel || echo $PROJECT)-${TAG}-$( \
    /usr/local/jenkins/slave_scripts/pypi-extract-universal.py || \
    true)-none-any.whl
# Start clean so stale artifacts are never signed by mistake.
rm -rf *.asc *.tar.gz *.whl
# curl --fail exits non-zero on HTTP errors; under -e a missing tarball
# aborts the script (the tarball is mandatory, unlike the wheel below).
curl --fail -o $TARBALL https://${TARBALL_SITE}/${PROJECT}/${TARBALL}
file -b $TARBALL | grep gzip # Make sure we actually got a tarball
gpg --armor --detach-sign $TARBALL
# Wheels are not mandatory, so only sign if we have one
if curl --fail -o $WHEEL https://${TARBALL_SITE}/${PROJECT}/${WHEEL}; then
    file -b $WHEEL | grep -i zip # Make sure we actually got a wheel
    gpg --armor --detach-sign $WHEEL
fi

View File

@ -1,200 +0,0 @@
#!/usr/bin/python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# tardiff.py -- compare the tar package with git archive. Error out if
# it's different. The files to exclude are stored in a file, one per line,
# and it's passed as argument to this script.
#
# You should run this script from the project directory. For example, if
# you are verifying the package for glance project, you should run this
# script from that directory.
import getopt
import sys
import os
import commands
class OpenStackTarDiff:
    """Verify that a project's sdist tarball matches ``git archive``.

    Every file tracked by git -- minus the basenames listed in an
    exclusion file -- must appear in the sdist tarball; any file missing
    from the tarball is reported and the process exits non-zero.

    Run from the root of the project being checked (e.g. the glance
    checkout when verifying glance's package).
    """

    def __init__(self):
        self.init_vars()
        self.validate_args()
        self.check_env()

    def check_env(self):
        """Exit if dist/ exists and no pre-built package was supplied."""
        # A stale dist/ would make get_sdist_files() pick up an old tarball.
        if not self.package and os.path.exists(self.dist_dir):
            self.error(
                "dist directory '%s' exist. Please remove it before "
                "running this script" % self.dist_dir)

    def validate_args(self):
        """Parse sys.argv; -e/--exclude is mandatory, -p and -v optional."""
        try:
            opts = getopt.getopt(sys.argv[1:], 'hvp:e:',
                                 ['help', 'verbose', 'package=',
                                  'exclude='])[0]
        except getopt.GetoptError:
            self.usage('invalid option selected')

        for opt, value in opts:
            if opt in ('-h', '--help'):
                self.usage()
            elif opt in ('-e', '--exclude'):
                self.e_file = value
            elif opt in ('-p', '--package'):
                self.package = value
            elif opt in ('-v', '--verbose'):
                self.verbose = True
            else:
                self.usage('unknown option : ' + opt)

        if not self.e_file:
            self.usage('specify file name containing list of files to '
                       'exclude in tar diff')
        if not os.path.exists(self.e_file):
            self.usage("file '%s' does not exist" % self.e_file)
        if self.package and not os.path.exists(self.package):
            self.usage("package '%s' specified, but does not "
                       "exist" % self.package)

    def init_vars(self):
        """Set the default state for a fresh run."""
        self.dist_dir = 'dist/'       # where "setup.py sdist" writes
        self.verbose = False          # -v: emit debug trace
        self.e_file = None            # -e: file listing basenames to skip
        self.project_name = None      # derived from the current directory
        self.prefix = None            # top-level directory inside the sdist
        self.package = None           # -p: pre-built tarball, if any
        self.sdist_files = []         # names found in the sdist tarball
        self.exclude_files = []       # basenames to ignore in the diff
        self.git_files = []           # names found in the git archive
        self.missing_files = []       # git files absent from the sdist

    def verify(self):
        """Compare git archive contents against the sdist; exit 1 on a miss."""
        self.get_exclude_files()
        self.get_project_name()
        self.get_sdist_files()
        # The first entry in the sdist listing is its top-level directory;
        # reuse it as the git archive prefix so the two listings line up.
        self.prefix = self.sdist_files[0]
        self.get_git_files()
        for fname in self.git_files:
            if os.path.basename(fname) in self.exclude_files:
                self.debug("excluding file '%s'" % fname)
            elif fname not in self.sdist_files:
                self.missing_files.append(fname)
        if len(self.missing_files) > 0:
            self.error("files missing in package: %s" % self.missing_files)
        print("SUCCESS: Generated package '%s' is valid" % self.package)

    def get_project_name(self):
        """Derive the project name from the current working directory."""
        self.project_name = os.path.basename(os.path.abspath(os.curdir))

    def get_exclude_files(self):
        """Load the exclusion list (one basename per line) from self.e_file."""
        with open(self.e_file, 'r') as fh:
            content = fh.readlines()
        self.debug("files to exclude: %s" % content)
        # remove trailing new lines.
        self.exclude_files = [x.strip() for x in content]

    def get_git_files(self):
        """List the files git tracks, via a throwaway "git archive" tar."""
        git_tar = os.path.join(os.getcwd(), '%s.tar' % self.project_name)
        try:
            a_cmd = ("git archive -o %s HEAD --prefix=%s" %
                     (git_tar, self.prefix))
            self.debug("executing command '%s'" % a_cmd)
            (status, out) = commands.getstatusoutput(a_cmd)
            if status != 0:
                self.debug("command '%s' returned status '%s'" %
                           (a_cmd, status))
                if os.path.exists(git_tar):
                    os.unlink(git_tar)
                self.error('git archive failed: %s' % out)
        except Exception as err:
            # Never leave a half-written archive behind.
            if os.path.exists(git_tar):
                os.unlink(git_tar)
            self.error('git archive failed: %s' % err)

        try:
            tar_cmd = "tar tf %s" % git_tar
            self.debug("executing command '%s'" % tar_cmd)
            (status, out) = commands.getstatusoutput(tar_cmd)
            if status != 0:
                self.error('invalid tar file: %s' % git_tar)
            self.git_files = out.split('\n')
            self.debug("Removing git archive ... %s ..." % git_tar)
            os.remove(git_tar)
        except Exception as err:
            self.error('unable to read tar: %s' % err)

    def get_sdist_files(self):
        """Build the sdist (unless -p was given) and list its contents."""
        if not self.package:
            # Hoisted out of the try so the except clause can reference it.
            sdist_cmd = "python setup.py sdist"
            try:
                self.debug("executing command '%s'" % sdist_cmd)
                (status, out) = commands.getstatusoutput(sdist_cmd)
                if status != 0:
                    self.error("command '%s' failed" % sdist_cmd)
            except Exception as err:
                # BUG FIX: the original formatted two values into a single
                # '%s' ("... failed" % (sdist_cmd, err)), which raised
                # TypeError instead of reporting the build failure.
                self.error("command '%s' failed: %s" % (sdist_cmd, err))
            self.package = os.listdir(self.dist_dir)[0]
            self.package = os.path.join(self.dist_dir, self.package)

        tar_cmd = "tar tzf %s" % self.package
        try:
            self.debug("executing command '%s'" % tar_cmd)
            (status, out) = commands.getstatusoutput(tar_cmd)
            if status != 0:
                self.error("command '%s' failed" % tar_cmd)
            self.sdist_files = out.split('\n')
        except Exception as err:
            self.error("command '%s' failed: %s" % (tar_cmd, err))

    def debug(self, msg):
        """Write a debug line to stdout when -v/--verbose was given."""
        if self.verbose:
            sys.stdout.write('DEBUG: %s\n' % msg)

    def error(self, msg):
        """Report a fatal error on stderr and exit with status 1."""
        sys.stderr.write('ERROR: %s\n' % msg)
        sys.exit(1)

    def usage(self, msg=None):
        """Print usage (to stderr if *msg* is an error) and exit."""
        stream = sys.stderr if msg else sys.stdout
        stream.write("usage: %s [--help|h] [-v] "
                     "[-p|--package=sdist_package.tar.gz] "
                     "-e|--exclude=filename\n" % os.path.basename(sys.argv[0]))
        if msg:
            stream.write("\nERROR: " + msg + "\n")
            exit_code = 1
        else:
            exit_code = 0
        sys.exit(exit_code)
if __name__ == '__main__':
    # Build the differ (parses argv in __init__) and run the comparison.
    OpenStackTarDiff().verify()