Attempt to use virtualenv for bootstrapping
To avoid anvil's dependencies causing conflicts with the prepared, packaged, and installed OpenStack dependencies, use a virtualenv to install those dependencies, and then run anvil with the Python interpreter from that environment instead of the system Python.

Change-Id: I6bb7851baa481b768f92bd828a9cec64e60f190d
parent dead1b6b06
commit 646282b533
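In rough outline, the approach this change introduces looks like the following sketch. The real implementation is the smithy script further down in this diff; the paths .venv, requirements.txt, and optional-requirements.txt are taken from that diff, while everything else here is illustrative rather than the committed code:

    VENV_DIR="$PWD/.venv"
    # Create an isolated environment so anvil's dependencies never land in the
    # system site-packages where the packaged OpenStack dependencies get installed.
    virtualenv --no-site-packages "$VENV_DIR"
    # Install anvil's own requirements only into that environment.
    "$VENV_DIR/bin/pip" install -r requirements.txt -r optional-requirements.txt
    # Run anvil with the virtualenv's python instead of the system python.
    exec "$VENV_DIR/bin/python" anvil "$@"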
@@ -135,8 +135,13 @@ class KeystoneRuntime(bruntime.OpenStackRuntime):
utils.wait_for_url(url)
init_what = utils.load_yaml_text(contents)
init_what = utils.expand_template_deep(init_what, params)
khelper.Initializer(params['keystone']['service_token'],
params['keystone']['endpoints']['admin']['uri']).initialize(**init_what)
try:
init_how = khelper.Initializer(params['keystone']['service_token'],
params['keystone']['endpoints']['admin']['uri'])
init_how.initialize(**init_what)
except RuntimeError:
LOG.exception("Failed to initialize keystone, is the keystone client library available?")
else:
# Writing this makes sure that we don't init again
sh.write_file(self.init_fn, utils.prettify_yaml(init_what))
LOG.info("If you wish to re-run initialization, delete %s", colorizer.quote(self.init_fn))
@@ -64,14 +64,12 @@ class DependencyHandler(object):
self.download_dir = sh.joinpths(self.deps_dir, "download")
self.log_dir = sh.joinpths(self.deps_dir, "output")
sh.mkdir(self.log_dir, recurse=True)

self.gathered_requires_filename = sh.joinpths(self.deps_dir, "pip-requires")
self.forced_requires_filename = sh.joinpths(self.deps_dir, "forced-requires")
self.download_requires_filename = sh.joinpths(self.deps_dir, "download-requires")
# Executables we require to operate
self.multipip_executable = sh.which("multipip", ["tools/"])
self.pip_executable = sh.which_first(['pip-python', 'pip'])
self.pipdownload_executable = sh.which("pip-download", ["tools/"])
self.pip_executable = sh.which_first(['pip', 'pip-python'])
# List of requirements
self.pips_to_install = []
self.forced_packages = []
@@ -90,6 +88,12 @@ class DependencyHandler(object):
req_set |= set(pkg["name"]
for pkg in inst.get_option(key) or [])
self.requirements[key] = req_set
# These pip names we will ignore from being converted/analyzed...
ignore_pips = self.distro.get_dependency_config("ignoreable_pips", quiet=True)
if not ignore_pips:
self.ignore_pips = set()
else:
self.ignore_pips = set(ignore_pips)

@property
def python_names(self):
@@ -182,7 +186,7 @@ class DependencyHandler(object):
contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(), "\n".join(new_lines))
sh.write_file_and_backup(fn, contents)
# NOTE(imelnikov): after updating requirement lists we should re-fetch
# data from them again, so we drop pip helper caches here:
# data from them again, so we drop pip helper caches here.
pip_helper.drop_caches()

def _gather_pips_to_install(self, requires_files, extra_pips=None):
@@ -199,10 +203,7 @@ class DependencyHandler(object):
cmdline = cmdline + extra_pips + ["-r"] + requires_files

ignore_pip_names = set(self.python_names)
more_ignores = self.distro.get_dependency_config('ignore_pip_names',
quiet=True)
if more_ignores:
ignore_pip_names.update([str(n) for n in more_ignores])
ignore_pip_names.update(self.ignore_pips)
if ignore_pip_names:
cmdline.extend(["--ignore-package"])
cmdline.extend(ignore_pip_names)
@@ -255,10 +256,24 @@ class DependencyHandler(object):
return self.pips_to_install

def _try_download_dependencies(self, attempt, pips_to_download, pip_download_dir):
# Clean out any previous paths that we don't want around.
for path in ['.build']:
path = sh.joinpths(pip_download_dir, path)
if sh.isdir(path):
sh.deldir(path)
sh.mkdir(path)
# Ensure certain directories exist that we want to exist (but we don't
# want to delete them run after run).
for path in ['.cache']:
path = sh.joinpths(pip_download_dir, path)
if not sh.isdir(path):
sh.mkdir(path)
cmdline = [
self.pipdownload_executable,
'-d', pip_download_dir,
'-v',
self.pip_executable, '-v',
'install', '-I', '-U',
'--download', pip_download_dir,
'--build', sh.joinpths(pip_download_dir, '.build'),
'--download-cache', sh.joinpths(pip_download_dir, '.cache'),
]
cmdline.extend(sorted([str(p) for p in pips_to_download]))
out_filename = sh.joinpths(self.log_dir,
@@ -122,10 +122,11 @@ class Helper(object):
"--source-only",
"--rpm-base", self._rpmbuild_dir
]
executable = " ".join(self._start_cmdline()[0:1])
params = {
"DOWNLOADS_DIR": self._download_dir,
"LOGS_DIR": self._log_dir,
"PY2RPM": self._py2rpm_executable,
"PY2RPM": executable,
"PY2RPM_FLAGS": " ".join(cmdline)
}
marks_dir = sh.joinpths(self._deps_dir, "marks-deps")
@@ -244,6 +244,7 @@ class YumDependencyHandler(base.DependencyHandler):
LOG.info("Copied to %s", system_repo_filename)

def _get_known_yum_packages(self):
LOG.info("Determining which yum packages are available or installed...")
yum_map = collections.defaultdict(list)
pkgs = []
pkgs.extend(self.helper.list_available())
@@ -307,6 +308,7 @@ class YumDependencyHandler(base.DependencyHandler):
# build or can satisfy by other means
no_pips = [pkg_resources.Requirement.parse(name).key
for name in self.python_names]
no_pips.extend(self.ignore_pips)
yum_map = self._get_known_yum_packages()
pips_keys = set([p.key for p in pips_downloaded])
@@ -25,6 +25,8 @@ dependency_handler:
qpid-python: python-qpid # Why is this one backwards :-/
PyYAML: PyYAML
pyzmq: python-zmq
ignoreable_pips:
- distribute # distribute has been replaced by setuptools
arch_dependent:
- PuLP
- cryptography
@@ -154,13 +156,6 @@ components:
removable: false
requires:
- name: MySQL-python
# These packages conflict with our deps and must be removed
conflicts:
- name: python-paste-deploy1.5
- name: python-nose1.1
- name: python-routes1.12
- name: python-sphinx10
- name: python-webob1.0
# Require extra packages needed to run tests
pips:
- name: "nose"
@@ -39,6 +39,7 @@ Patch$idx: $fn

BuildArch: noarch
BuildRequires: python-setuptools
BuildRequires: python-pbr

Requires: python-cinder = %{epoch}:%{version}-%{release}
@@ -38,6 +38,7 @@ BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}
BuildArch: noarch
BuildRequires: python-devel
BuildRequires: python-setuptools
BuildRequires: python-pbr

%if ! 0%{?usr_only}
Requires(post): chkconfig
@@ -39,6 +39,7 @@ BuildArch: noarch
BuildRequires: python2-devel
BuildRequires: python-setuptools
BuildRequires: python-sphinx
BuildRequires: python-pbr

Requires: %{name}-common = %{version}-%{release}
Requires: %{name}-engine = %{version}-%{release}
@@ -35,6 +35,7 @@ BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}
BuildArch: noarch
BuildRequires: python-devel
BuildRequires: python-setuptools
BuildRequires: python-pbr

%if ! 0%{?usr_only}
Requires(post): chkconfig
@@ -52,6 +52,7 @@ BuildArch: noarch

BuildRequires: python-devel
BuildRequires: python-setuptools
BuildRequires: python-pbr
# Build require these parallel versions
# as setup.py build imports neutron.openstack.common.setup
# which will then check for these
@@ -56,6 +56,7 @@ BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}
BuildArch: noarch
BuildRequires: python-devel
BuildRequires: python-setuptools
BuildRequires: python-pbr

Requires: %{name}-compute = %{epoch}:%{version}-%{release}
Requires: %{name}-cert = %{epoch}:%{version}-%{release}
@@ -36,6 +36,7 @@ BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}
BuildArch: noarch
BuildRequires: python-devel
BuildRequires: python-setuptools
BuildRequires: python-pbr

%if ! 0%{?usr_only}
Requires(post): chkconfig
@@ -43,6 +43,7 @@ BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}

BuildArch: noarch
BuildRequires: python-setuptools
BuildRequires: python-pbr

%if 0%{?enable_doc}
BuildRequires: python-sphinx
optional-requirements.txt (new file, 5 lines)
@@ -0,0 +1,5 @@
# Used by internal setup helpers (since we run in a venv we won't have access
# to the installed helpers). These will work just fine though for the setup
# usage.
python-keystoneclient
python-glanceclient
@@ -1,6 +1,7 @@
pbr>=0.5.21,<1.0

argparse
# Anvil should not depend on anything that will cause issues when running
# egg_info on the downloaded dependencies, pbr seems to be the one case that
# currently causes an issue (since it is not directly used for anvil runtime
# it is placed in test-requirements for the time being).
cheetah>=2.4.4
iniparse
iso8601>=0.1.8
@@ -12,3 +13,4 @@ psutil>=0.6.1,<1.0
PyYAML>=3.1.0
six>=1.4.1
termcolor
argparse
@@ -31,7 +31,6 @@ packages =

scripts =
tools/multipip
tools/pip-download
tools/py2rpm
tools/specprint
tools/yyoom
smithy (430 lines changed)
@@ -8,37 +8,18 @@ SMITHY_NAME=$(readlink -f "$0")
cd "$(dirname "$0")"

VERBOSE="${VERBOSE:-0}"
PY2RPM_CMD="$PWD/tools/py2rpm"
YYOOM_CMD="$PWD/tools/yyoom"
PIPDOWNLOAD_CMD="$PWD/tools/pip-download"

YUM_OPTS="--assumeyes --nogpgcheck"
YYOOM_OPTS="--verbose"
RPM_OPTS=""
CURL_OPTS=""

# Colors supported??
COLOR_SUPPORT=$(tput colors)

if [ "$COLOR_SUPPORT" -ge 8 ]; then
ESC_SEQ="\x1b["
COL_RESET=$ESC_SEQ"39;49;00m"
COL_GREEN=$ESC_SEQ"32;01m"
COL_RED=$ESC_SEQ"31;01m"
COL_YELLOW=$ESC_SEQ"33;01m"
else
ESC_SEQ=""
COL_RESET=""
COL_GREEN=""
COL_RED=""
COL_YELLOW=""
fi
VENV_OPTS="--no-site-packages"
VENV_DIR="$PWD/.venv"
VENV_ACTIVATE="$VENV_DIR/bin/activate"
PIP="$VENV_DIR/bin/pip"
YYOOM_CMD="$PWD/tools/yyoom"

if [ "$VERBOSE" == "0" ]; then
YUM_OPTS="$YUM_OPTS -q"
YYOOM_OPTS=""
RPM_OPTS="-q"
CURL_OPTS="-s"
VENV_OPTS="$VENV_OPTS -q"
fi

# Source in our variables (or overrides)
@@ -53,50 +34,6 @@ if [ -n "$SUDO_USER" ]; then
fi
fi

if [ -z "$BOOT_FILES" ]; then
BOOT_FN=".anvil_bootstrapped"
BOOT_FILES="${PWD}/$BOOT_FN"
fi

clean_pip()
{
# https://github.com/pypa/pip/issues/982
if [ -n "$SUDO_USER" ]; then
rm -rf "/tmp/pip-build-$SUDO_USER"
fi
}

rpm_is_installed()
{
local name=$(basename "$1")
rpm $RPM_OPTS "${name%.rpm}" &>/dev/null
return $?
}

cache_and_install_rpm_url()
{
url=${1:?"Error: rpm uri is undefined!"}
cachedir=${RPM_CACHEDIR:-'/tmp'}
rpm=$(basename "$url")
if rpm_is_installed "$rpm"; then
return
fi
if [ ! -f "$cachedir/$rpm" ]; then
echo -e "Downloading ${COL_GREEN}${rpm}${COL_RESET} to ${COL_GREEN}${cachedir}${COL_RESET}"
curl $CURL_OPTS "$url" -o "$cachedir/$rpm" || return 1
fi
echo -e "Installing ${COL_GREEN}$cachedir/$rpm${COL_RESET}"
yum install $YUM_OPTS "$cachedir/$rpm"
return $?
}

bootstrap_epel()
{
[ -z "$EPEL_RPM_URL" ] && return 0
cache_and_install_rpm_url "$EPEL_RPM_URL"
return $?
}

dump_list()
{
for var in "$@"; do
@@ -106,6 +43,176 @@ dump_list()
done
}

yum_install()
{
local requires=$@
if [ "$VERBOSE" == "0" ]; then
yum install $YUM_OPTS $requires > /dev/null 2>&1
else
yum install $YUM_OPTS $requires
fi
return $?
}

yum_remove()
{
local remove=$@
if [ "$VERBOSE" == "0" ]; then
yum remove $YUM_OPTS $remove > /dev/null 2>&1
else
yum remove $YUM_OPTS $remove
fi
return $?
}

bootstrap_rpm_packages()
{
if [ -n "$REQUIRES" ]; then
echo -e "Installing system packages:"
dump_list $REQUIRES
echo "Please wait..."
yum_install $REQUIRES
if [ "$?" != "0" ]; then
echo -e "Failed installing!"
return 1
fi
fi
if [ -n "$CONFLICTS" ]; then
echo -e "Removing conflicting system packages:"
dump_list $CONFLICTS
echo "Please wait..."
yum_remove $CONFLICTS
if [ "$?" != "0" ]; then
echo -e "Failed removing!"
return 1
fi
fi
return 0
}

clean_pip()
{
# See: https://github.com/pypa/pip/issues/982
if [ -n "$SUDO_USER" ]; then
rm -rf "/tmp/pip-build-$SUDO_USER"
fi
}

bootstrap_epel()
{
# Installs the repository that will allow for installation of packages
# from epel, see https://fedoraproject.org/wiki/EPEL for information
# about what is epel.
[ -z "$EPEL_RPM_URL" ] && return 0
echo "Installing epel rpm from $EPEL_RPM_URL"
cache_and_install_rpm_url "$EPEL_RPM_URL"
return $?
}

unsudo()
{
# If a sudo user is active the given files/directories will be changed to
# be owned by that user instead of the current root user, if no sudo user
# is active, then nothing changes.
if [ -n "$SUDO_UID" -a -n "$SUDO_GID" ]; then
if [ "$VERBOSE" == "0" ]; then
chown -R "$SUDO_UID:$SUDO_GID" $@
else
chown -R -c "$SUDO_UID:$SUDO_GID" $@
fi
fi
}

bootstrap_virtualenv()
{
# Creates a virtualenv and then installs anvils requirements in it.
echo "Setting up virtualenv in $VENV_DIR"
virtualenv $VENV_OPTS "$VENV_DIR" || return 1
unsudo $VENV_DIR
local deps=$(cat requirements.txt optional-requirements.txt | grep -v '^$\|^\s*\#' | sort)
if [ -n "$deps" ]; then
echo "Installing anvil dependencies in $VENV_DIR"
dump_list $deps
echo "Please wait..."
if [ "$VERBOSE" == "0" ]; then
$PIP install -r requirements.txt -r optional-requirements.txt > /dev/null 2>&1
else
$PIP install -v -r requirements.txt -r optional-requirements.txt
fi
if [ "$?" != "0" ]; then
return 1
fi
fi
unsudo $VENV_DIR
}

bootstrap_selinux()
{
# See if selinux is on.
echo "Enabling selinux for yum like binaries."
if [ "$(getenforce)" == "Enforcing" ]; then
# Ensure all yum api interacting binaries are ok to be used.
chcon -h "system_u:object_r:rpm_exec_t:s0" "$YYOOM_CMD"
fi
}

run_smithy()
{
source "$VENV_ACTIVATE"
local python=$(which python)
exec "$python" anvil $ARGS
}

puke()
{
cleaned_force=$(echo "$FORCE" | sed -e 's/\([A-Z]\)/\L\1/g;s/\s//g')
if [ "$cleaned_force" == "yes" ]; then
run_smithy
else
echo -e "To run anyway set FORCE=yes and rerun." >&2
exit 1
fi
}

needs_bootstrap()
{
# Checks if we need to perform the bootstrap phase.
if [ "$BOOTSTRAP" == "true" ]; then
return 0
fi
if [ ! -d "$VENV_DIR" -o ! -f "$VENV_ACTIVATE" -o ! -f "$PIP" ]; then
return 0
fi
return 1
}

rpm_is_installed()
{
# Checks if an rpm is already installed.
local name=$(basename "$1")
rpm -q "${name%.rpm}" &>/dev/null
return $?
}

cache_and_install_rpm_url()
{
# Downloads an rpm from a url and then installs it (if it's not already
# installed).
url=${1:?"Error: rpm url is undefined!"}
cachedir=${RPM_CACHEDIR:-'/tmp'}
rpm=$(basename "$url")
if rpm_is_installed "$rpm"; then
return 0
fi
if [ ! -f "$cachedir/$rpm" ]; then
echo -e "Downloading ${rpm} to ${cachedir}"
curl $CURL_OPTS "$url" -o "$cachedir/$rpm" || return 1
fi
echo -e "Installing $cachedir/$rpm"
yum_install "$cachedir/$rpm"
return $?
}

greatest_version()
{
for arg in "$@"; do
@@ -113,169 +220,11 @@ greatest_version()
done | sort --version-sort --reverse | head -n1
}

bootstrap_rpm_packages()
{
# NOTE(aababilov): the latter operations require some packages,
# so, begin from installation
if [ -n "$REQUIRES" ]; then
echo -e "Installing ${COL_GREEN}system${COL_RESET} packages:"
dump_list $REQUIRES
yum install $YUM_OPTS $REQUIRES
if [ "$?" != "0" ]; then
echo -e "${COL_RED}Failed installing!${COL_RESET}"
return 1
fi
fi

# Remove any known conflicting packages
CONFLICTS=$(python -c "import yaml
packages = set()
try:
for i in yaml.safe_load(open('$DISTRO_CONFIG'))['components'].itervalues():
for j in i.get('conflicts', []):
packages.add(j.get('name'))
except KeyError:
pass
for pkg in packages:
if pkg:
print pkg
")
if [ -n "$CONFLICTS" ]; then
echo -e "Removing ${COL_YELLOW}conflicting${COL_RESET} packages:"
dump_list "$CONFLICTS"
yum erase $YUM_OPTS $CONFLICTS
fi
}

bootstrap_selinux()
{
# See if selinux is on.
if [ "$(getenforce)" == "Enforcing" ]; then
# Ensure all yum api interacting binaries are ok to be used
echo "Enabling selinux for yum like binaries."
chcon -h "system_u:object_r:rpm_exec_t:s0" "$YYOOM_CMD"
fi
}

bootstrap_python_rpms()
{
echo -e "Bootstrapping ${COL_GREEN}python${COL_RESET} rpms."
local package_map=$(python -c "import yaml
try:
for k, v in yaml.safe_load(open('$DISTRO_CONFIG'))['dependency_handler']['package_map'].iteritems():
print '%s==%s' % (k, v)
except KeyError:
pass
")
local python_names=$(sed -r -e 's/#.*$//' requirements.txt | sort -u)
local bootstrap_dir=$(readlink -f ./.bootstrap/)
local transaction_cmd="transaction --skip-missing"
local install_packages=""
declare -A rpm_python_map
for python_name in $python_names; do
local specs=$(echo "$python_name" | awk 'match($0, "((=|>|<|!).*$)", res) {print res[1]}')
local rpm_name=$("$PY2RPM_CMD" --package-map $package_map --convert "$python_name" |
awk '/^Requires:/ {print $2; exit }')
rpm_python_map["$rpm_name"]="$python_name"
install_packages="$install_packages $rpm_name$specs"
transaction_cmd+=" --install $rpm_name$specs"
done

echo -e "Installing ${COL_GREEN}python${COL_RESET} requirements:"
dump_list "$install_packages"

# NOTE(imelnikov): if we declare local variable and specify its value
# at the same statement, exit code from subshell is lost.
local yyoom_res
yyoom_res=$("$YYOOM_CMD" $YYOOM_OPTS $transaction_cmd) || return 1
local missing_rpms
missing_rpms=$(echo "$yyoom_res" | python -c "import sys, json
for item in json.load(sys.stdin):
if item.get('action_type') == 'missing':
print(item['name'])
") || return 2
local missing_packages=""
for rpm in $missing_rpms; do
missing_packages="$missing_packages ${rpm_python_map[$rpm]}"
done

if [ -z "$missing_packages" ]; then
return 0
fi
echo -e "Building ${COL_YELLOW}missing${COL_RESET} python requirements:"
dump_list "$missing_packages"
local pip_tmp_dir="$bootstrap_dir/pip-download"
mkdir -p "$pip_tmp_dir"

$PIPDOWNLOAD_CMD -d "$pip_tmp_dir" $missing_packages
echo "Building RPMs..."
local rpm_names
rpm_names=$("$PY2RPM_CMD" --package-map $package_map --scripts-dir "conf/templates/packaging/scripts" --rpm-base "$bootstrap_dir/rpmbuild" -- "$pip_tmp_dir/"* 2>/dev/null |
awk '/^Wrote: /{ print $2 }' | grep -v '.src.rpm' | sort -u)
if [ -z "$rpm_names" ]; then
echo -e "${COL_RED}No binary RPMs were built!${COL_RESET}"
return 1
fi
local rpm_base_names=""
for rpm in $rpm_names; do
rpm_base_names="$rpm_base_names $(basename "$rpm")"
done
echo -e "Installing ${COL_YELLOW}missing${COL_RESET} python requirement packages:"
dump_list "$rpm_base_names"
yum install $YUM_OPTS $rpm_names
if [ "$?" != "0" ]; then
echo -e "${COL_RED}Failed installing!${COL_RESET}"
return 1
fi
rm -rf "$pip_tmp_dir"
rm -rf "$bootstrap_dir/rpmbuild/"{BUILD,SOURCES,SPECS,BUILDROOT}
return 0
}

needs_bootstrap()
{
local contents checksums
$BOOTSTRAP && return 0
checksums=$(get_checksums)
for i in $BOOT_FILES; do
if [ -f "$i" ]; then
contents=$(cat $i)
[ "$contents" = "$checksums" ] && return 1
fi
done
return 0
}

get_checksums()
{
if [ ! -f "$BSCONF_FILE" ]; then
return 1
fi
# Used to tell if the file have changed
echo $(md5sum "$BSCONF_FILE")
}

run_smithy()
{
PYTHON=$(which python)
exec "$PYTHON" anvil $ARGS
}

puke()
{
cleaned_force=$(echo "$FORCE" | sed -e 's/\([A-Z]\)/\L\1/g;s/\s//g')
if [[ "$cleaned_force" == "yes" ]]; then
run_smithy
else
echo -e "To run anyway set ${COL_YELLOW}FORCE=yes${COL_RESET} and rerun." >&2
exit 1
fi
}

## Identify which bootstrap configuration file to use: either set
## explicitly (BSCONF_FILE) or determined based on the os distribution:
BSCONF_DIR="${BSCONF_DIR:-$(dirname $(readlink -f "$0"))/tools/bootstrap}"
get_os_info(){
get_os_info()
{
if [ "$(uname)" = "Linux" ] ; then
if [ -f /etc/redhat-release ] ; then
PKG="rpm"
@@ -325,13 +274,14 @@ done
# Source immediately so that we can export the needed variables.
if [ -f "$BSCONF_FILE" ]; then
source "$BSCONF_FILE"
export REQUIRED_PACKAGES="$REQUIRES"
fi

if ! needs_bootstrap; then
clean_pip
run_smithy
elif ! $BOOTSTRAP; then
fi

if [ "$BOOTSTRAP" == "false" ]; then
echo "This system needs to be updated in order to run anvil!" >&2
echo "Running 'sudo $SMITHY_NAME --bootstrap' will attempt to do so." >&2
puke
@@ -359,6 +309,7 @@ echo "Bootstrapping $SHORTNAME $RELEASE"
echo "Please wait..."
clean_pip
for step in ${STEPS:?"Error: STEPS is undefined!"}; do
echo "--- Running bootstrap step $step ---"
"bootstrap_${step}"
if [ $? != 0 ]; then
echo "Bootstrapping $SHORTNAME $RELEASE failed." >&2
@@ -367,21 +318,12 @@ for step in ${STEPS:?"Error: STEPS is undefined!"}; do
done
clean_pip

# Write the checksums of the bootstrap file
# which if new requirements are added will cause new checksums
# and a new dependency install...
checksum=$(get_checksums)
for i in $BOOT_FILES; do
echo -e "$checksum" > "$i"
done

# Anvil writes configurations in these locations, make sure they are created
# and that the user running this script can actually access those files (even
# later if they are not running with sudo).
mkdir -p -v /etc/anvil /usr/share/anvil
touch /var/log/anvil.log
if [ -n "$SUDO_UID" -a -n "SUDO_GID" ]; then
chown -c "$SUDO_UID:$SUDO_GID" /etc/anvil /usr/share/anvil \
/var/log/anvil.log
[ -d .bootstrap ] && chown -R "$SUDO_UID:$SUDO_GID" .bootstrap
fi
unsudo /etc/anvil /usr/share/anvil /var/log/anvil.log

echo "Bootstrapped for $SHORTNAME $RELEASE"
ARGS="-a moo"
@@ -5,3 +5,12 @@ flake8==2.0
pylint==0.25.2
hacking>=0.8.0,<0.9
mock>=1.0
# This is only needed for running anvils setup.py which is not done
# during running of anvil, but is done during testing or package
# creation...
#
# Using it in the created virtualenv causes conflict with
# other openstack packages (running there setup.py egg_info will break)
# so placing it here means that we will avoid such conflicts in the first
# place.
pbr>=0.5.21,<1.0
@@ -1,5 +1,5 @@
# -*- sh -*-
STEPS="selinux epel rpm_packages python_rpms"
STEPS="selinux epel rpm_packages virtualenv"
EPEL_RPM_URL="http://mirrors.kernel.org/fedora-epel/6/i386/epel-release-6-8.noarch.rpm"
DISTRO_CONFIG=conf/distros/rhel.yaml

@@ -18,11 +18,16 @@ REQUIRES='
createrepo
yum-utils

PyYAML

rpm-build
python-pip
python-virtualenv
python-argparse
python-setuptools
python-six
'
CONFLICTS='
python-paste-deploy1.5
python-nose1.1
python-routes1.12
python-sphinx10
python-webob1.0
'
@@ -1,228 +0,0 @@
#!/usr/bin/python

# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import distutils
import glob
import optparse
import os
import subprocess
import sys
import thread
import threading
import time

from pip import download as pip_down
from pip import req as pip_req
from pip import util as pip_util

PIP_CMDS = (
"pip-%s" % (".".join([str(s) for s in sys.version_info[0:2]])),
'pip-python',
'pip',
)
ARCHIVE_EXTS = (
'.zip', '.tgz', '.tbz',
'.tar.gz', '.tar', '.gz', '.bz2',
'.pybundle', '.whl',
)


class Spinner(threading.Thread):
def __init__(self, wait_time=0.15, stream=sys.stdout):
super(Spinner, self).__init__()
self.daemon = True
self.death_event = threading.Event()
self.wait_time = float(wait_time)
self.stream = stream

def run(self):
if not self.stream.isatty():
tpl = "%s"
selectors = (".",)
else:
tpl = "\b%s"
selectors = ('-', '\\', '|', '/')
try:
index = 0
has_output_something = False
selector_count = len(selectors)
while not self.death_event.is_set():
self.stream.write(tpl % selectors[index])
self.stream.flush()
has_output_something = True
if selector_count > 1:
index = (index + 1) % selector_count
time.sleep(self.wait_time)
if has_output_something:
self.stream.write("\n")
self.stream.flush()
except KeyboardInterrupt:
thread.interrupt_main()


def call(cmd):
proc = subprocess.Popen(cmd, stderr=None, stdin=None, stdout=None)
ret = proc.communicate()
if proc.returncode != 0:
raise RuntimeError("Failed running command: %s\n"
"Exit code: %s" % (" ".join(cmd), proc.returncode))
return ret


def find_pip():
for pp in PIP_CMDS:
bin_name = distutils.spawn.find_executable(pp)
if bin_name:
return bin_name
raise RuntimeError("Unable to find pip via any of %s commands"
% list(PIP_CMDS))


def execute_download(options, deps, download_dir, cache_dir, build_dir):
cmd = [find_pip()]
if options.verbose:
cmd.extend(['-v'])
else:
cmd.extend(['-q'])
cmd.extend(['install', '-I', '-U',
'--download', download_dir,
'--build', build_dir,
'--download-cache', cache_dir])
cmd.extend([str(d) for d in deps])
call(cmd)


def remove_archive_extensions(path):
for i in ARCHIVE_EXTS:
if path.endswith(i):
path = path[0:-len(i)]
return path


def extract_requirement(source_dir):
# Remove old egg-infos since it appears that pip will not work if there
# are previous egg-info directories existing in the source directory.
source_dir = os.path.abspath(source_dir)
for egg_d in glob.glob(os.path.join(source_dir, "*.egg-info")):
pip_util.rmtree(egg_d)
req = pip_req.InstallRequirement.from_line(source_dir)
req.source_dir = source_dir
req.run_egg_info()
return req


def examine_file(filename, extract_dir):
if not os.path.isfile(filename):
raise IOError("Can not extract missing file: %s" % (filename))
if not os.path.isdir(extract_dir):
raise IOError("Can not extract to missing dir: %s" % (extract_dir))
basename = os.path.basename(filename)
untar_dir = os.path.join(extract_dir, remove_archive_extensions(basename))
if os.path.isdir(untar_dir):
pip_util.rmtree(untar_dir)
pip_util.unpack_file(filename, untar_dir, content_type='', link='')
return extract_requirement(untar_dir)


def iter_archives_in(base_dir):
for basename in os.listdir(base_dir):
if basename.startswith("."):
continue
if not pip_down.is_archive_file(basename):
continue
filename = os.path.join(base_dir, basename)
if not os.path.isfile(filename):
continue
yield filename


def perform_download(options, deps,
download_dir, extract_dir, cache_dir, build_dir):
execute_download(options, deps, download_dir, cache_dir, build_dir)
files_examined = {}
for filename in iter_archives_in(download_dir):
files_examined[filename] = examine_file(filename, extract_dir)
return files_examined


def print_header(text):
if not text:
return
print("-" * len(text))
print(text)
print("-" * len(text))


def main():
usage = "usage: %prog [options] req req2 ..."
parser = optparse.OptionParser(usage=usage)
parser.add_option("-d", action="store", dest="download_dir",
help='directory to download dependencies too',
metavar="DIR")
parser.add_option("-n", "--no-wipe", action="store_false", dest="wipe",
help='do not clear old downloaded items', default=True)
parser.add_option("-v", '--verbose', action="store_true",
help='enable verbose output',
dest="verbose", default=False)
(options, packages) = parser.parse_args()

download_dir = options.download_dir
if not options.download_dir:
parser.error("Download directory required")
if not packages:
parser.error("Download requirement/s expected")

# Clear out the build & extraction directory if it exists so we don't
# conflict with previous build or extraction attempts.
extract_dir = os.path.join(download_dir, '.extract')
cache_dir = os.path.join(download_dir, '.cache')
build_dir = os.path.join(download_dir, '.build')
for d in (extract_dir, build_dir):
if os.path.isdir(d):
pip_util.rmtree(d)
for d in (download_dir, extract_dir, cache_dir, build_dir):
if not os.path.isdir(d):
os.makedirs(d)

# Clear out the previously existing files so that we don't have conflicts
# with the files we are about to download.
if options.wipe:
existing_files = list(iter_archives_in(download_dir))
if existing_files:
print_header("Cleaning")
for path in sorted(existing_files):
print(" - %s (X)" % (os.path.basename(path)))
os.unlink(path)

print_header("Downloading")
s = Spinner()
s.start()
downloaded = perform_download(options, list(packages),
download_dir, extract_dir, cache_dir,
build_dir)
s.death_event.set()
s.join()

print_header("Saved")
for filename in sorted(downloaded.keys()):
print(" - %s (%s)" % (os.path.basename(filename),
downloaded[filename].req))


if __name__ == '__main__':
main()