Build RPMs and use YUM to handle dependencies

Implements: blueprint robust-dependencies

Fixes: bug #1157871
Fixes: bug #1184016
Fixes: bug #1184017

Change-Id: I4d1e6d4b1b32473182259d60b1db6918cba889e9
Author: Alessio Ababilov, 2013-05-23 10:19:06 +04:00 (committed by Joshua Harlow)
Parent: 773581ce79
Commit: 54d8db1e4d
48 changed files with 1944 additions and 1892 deletions
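
For orientation before the per-file hunks: the commit replaces the old per-component package/install logic with a distro-level dependency handler. A minimal sketch of the new control flow, pieced together from the hunks below (names are taken from the diff; the surrounding scaffolding is simplified and is not runnable outside anvil):

# Sketch only: PrepareAction (needs_sudo = False) gathers and packages
# dependencies; InstallAction (needs_sudo = True) installs them through the
# distro's dependency handler (e.g. a YUM-backed one).
runner_cls = actions.class_for(action)   # failures become OptionException in opts.run()
if runner_cls.needs_sudo:
    ensure_perms()                        # only the install path needs root
handler = distro.dependency_handler_class(distro, root_dir, instances.values())
if action == "prepare":
    handler.package()                     # resolve pip requires, build the packages
elif action == "install":
    handler.install()                     # hand installation over to the system packager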


@ -54,8 +54,13 @@ def run(args):
# Keep the old args around so we have the full set to write out
saved_args = dict(args)
action = args.pop("action", '').strip().lower()
if action not in actions.names():
raise excp.OptionException("Invalid action name %r specified!" % (action))
try:
runner_cls = actions.class_for(action)
except Exception as ex:
raise excp.OptionException(str(ex))
if runner_cls.needs_sudo:
ensure_perms()
persona_fn = args.pop('persona_fn')
if not persona_fn:
@ -98,7 +103,6 @@ def run(args):
raise excp.OptionException("Error loading persona file: %s due to %s" % (persona_fn, e))
# Get the object we will be running with...
runner_cls = actions.class_for(action)
runner = runner_cls(distro=dist,
root_dir=root_dir,
name=action,
@ -216,17 +220,14 @@ def main():
traceback, file=sys.stdout)
try:
ensure_perms()
run(args)
utils.goodbye(True)
return 0
except excp.PermException as e:
print_exc(e)
print(("This program should be running via %s as it performs some root-only commands is it not?")
% (colorizer.quote('sudo', quote_color='red')))
return 2
try:
run(args)
utils.goodbye(True)
return 0
except excp.OptionException as e:
print_exc(e)
print("Perhaps you should try %s" % (colorizer.quote('--help', quote_color='red')))


@ -14,8 +14,8 @@
# License for the specific language governing permissions and limitations
# under the License.
from anvil.actions import prepare
from anvil.actions import install
from anvil.actions import package
from anvil.actions import restart
from anvil.actions import start
from anvil.actions import status
@ -26,8 +26,8 @@ from anvil.actions import uninstall
_NAMES_TO_RUNNER = {
'prepare': prepare.PrepareAction,
'install': install.InstallAction,
'package': package.PackageAction,
'restart': restart.RestartAction,
'start': start.StartAction,
'status': status.StatusAction,
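
The CLI hunk earlier resolves the runner through actions.class_for() inside a try/except; a plausible shape for the accessors backing the _NAMES_TO_RUNNER registry above (hypothetical, their bodies are not part of this diff):

def names():
    # Every action name the CLI will accept.
    return sorted(_NAMES_TO_RUNNER.keys())

def class_for(name):
    # Raises for unknown actions; opts.run() converts this into an OptionException.
    try:
        return _NAMES_TO_RUNNER[name]
    except KeyError:
        raise RuntimeError("Unknown action %r, expected one of: %s"
                           % (name, ", ".join(names())))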


@ -47,6 +47,7 @@ class PhaseFunctors(object):
class Action(object):
__meta__ = abc.ABCMeta
needs_sudo = True
def __init__(self, name, distro, root_dir, cli_opts):
self.distro = distro


@ -16,14 +16,12 @@
from StringIO import StringIO
from anvil.actions import base as action
from anvil import colorizer
from anvil import log
from anvil import pprint
from anvil import shell as sh
from anvil import utils
from anvil.components import base_install as binstall
from anvil.actions import base as action
LOG = log.getLogger(__name__)
@ -61,36 +59,8 @@ class InstallAction(action.Action):
header="Wrote to %s %s exports" % (path, len(entries)),
logger=LOG)
def _analyze_dependencies(self, instance_dependencies):
LOG.debug("Full known dependency list: ")
LOG.debug(pprint.pformat(instance_dependencies))
def _run(self, persona, component_order, instances):
removals = []
self._run_phase(
action.PhaseFunctors(
start=lambda i: LOG.info('Downloading %s.', colorizer.quote(i.name)),
run=lambda i: i.download(),
end=lambda i, result: LOG.info("Performed %s downloads.", len(result))
),
component_order,
instances,
"download",
*removals
)
self._run_phase(
action.PhaseFunctors(
start=lambda i: LOG.info('Post-download patching %s.', colorizer.quote(i.name)),
run=lambda i: i.patch("download"),
end=None,
),
component_order,
instances,
"download-patch",
*removals
)
removals += ['uninstall', 'unconfigure']
removals = ['uninstall', 'unconfigure']
self._run_phase(
action.PhaseFunctors(
start=lambda i: LOG.info('Configuring %s.', colorizer.quote(i.name)),
@ -103,13 +73,6 @@ class InstallAction(action.Action):
*removals
)
if self.only_configure:
# TODO(harlowja) this could really be a new action that
# does the download and configure and let the install
# routine actually do the install steps...
LOG.info("Exiting early, only asked to download and configure!")
return
def preinstall_run(instance):
instance.pre_install()
@ -126,31 +89,6 @@ class InstallAction(action.Action):
*removals
)
all_instance_dependencies = {}
def capture_run(instance):
instance_dependencies = {}
if isinstance(instance, (binstall.PkgInstallComponent)):
instance_dependencies['packages'] = instance.packages
if isinstance(instance, (binstall.PythonInstallComponent)):
instance_dependencies['pips'] = instance.pip_requires
all_instance_dependencies[instance.name] = instance_dependencies
self._run_phase(
action.PhaseFunctors(
start=lambda i: LOG.info('Capturing dependencies of %s.', colorizer.quote(i.name)),
run=capture_run,
end=None,
),
component_order,
instances,
None,
*removals
)
# Do validation on the installed dependency set.
self._analyze_dependencies(all_instance_dependencies)
def install_start(instance):
subsystems = set(list(instance.subsystems))
if subsystems:
@ -166,17 +104,21 @@ class InstallAction(action.Action):
LOG.info("Finished install of %s with result %s.",
colorizer.quote(instance.name), result)
dependency_handler = self.distro.dependency_handler_class(
self.distro, self.root_dir, instances.values())
general_package = "general"
self._run_phase(
action.PhaseFunctors(
start=install_start,
run=lambda i: i.install(),
end=install_finish,
start=lambda i: LOG.info("Installing packages"),
run=lambda i: dependency_handler.install(),
end=None,
),
component_order,
instances,
"install",
[general_package],
{general_package: instances[general_package]},
"package-install",
*removals
)
self._run_phase(
action.PhaseFunctors(
start=lambda i: LOG.info('Post-installing %s.', colorizer.quote(i.name)),


@ -1,48 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from anvil import colorizer
from anvil import log
from anvil.actions import base as action
LOG = log.getLogger(__name__)
class PackageAction(action.Action):
@property
def lookup_name(self):
return 'package'
def _finish_package(self, component, where):
if not where:
LOG.info("Component %s can not create a package.",
colorizer.quote(component.name))
else:
LOG.info("Package created at %s for component %s.",
colorizer.quote(where), colorizer.quote(component.name))
def _run(self, persona, component_order, instances):
self._run_phase(
action.PhaseFunctors(
start=lambda i: LOG.info('Creating a package for component %s.', colorizer.quote(i.name)),
run=lambda i: i.package(),
end=self._finish_package,
),
component_order,
instances,
None,
)

anvil/actions/prepare.py (new file, 84 lines)

@ -0,0 +1,84 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# pylint: disable=R0915
from anvil.actions import base as action
from anvil import colorizer
from anvil import log
LOG = log.getLogger(__name__)
class PrepareAction(action.Action):
needs_sudo = False
def __init__(self, name, distro, root_dir, cli_opts):
action.Action.__init__(self, name, distro, root_dir, cli_opts)
@property
def lookup_name(self):
return 'install'
def _run(self, persona, component_order, instances):
removals = []
self._run_phase(
action.PhaseFunctors(
start=lambda i: LOG.info('Downloading %s.', colorizer.quote(i.name)),
run=lambda i: i.download(),
end=lambda i, result: LOG.info("Performed %s downloads.", len(result))
),
component_order,
instances,
"download",
*removals
)
self._run_phase(
action.PhaseFunctors(
start=lambda i: LOG.info('Post-download patching %s.', colorizer.quote(i.name)),
run=lambda i: i.patch("download"),
end=None,
),
component_order,
instances,
"download-patch",
*removals
)
self._run_phase(
action.PhaseFunctors(
start=lambda i: LOG.info('Preparing %s.', colorizer.quote(i.name)),
run=lambda i: i.prepare(),
end=None,
),
component_order,
instances,
"prepare",
*removals
)
dependency_handler = self.distro.dependency_handler_class(
self.distro, self.root_dir, instances.values())
general_package = "general"
self._run_phase(
action.PhaseFunctors(
start=lambda i: LOG.info("Packing OpenStack and its dependencies"),
run=lambda i: dependency_handler.package(),
end=None,
),
[general_package],
{general_package: instances[general_package]},
"package",
*removals
)


@ -50,7 +50,7 @@ class StatusAction(action.Action):
def _print_status(self, component, result):
if not result:
LOG.info("Status of %s is %s.", colorizer.quote(component.name), self._quote_status(STATUS_UNKNOWN))
LOG.info("Status of %s is %s.", colorizer.quote(component.name), self._quote_status(STATUS_INSTALLED))
return
def log_details(text, spacing, max_len):


@ -49,6 +49,8 @@ class Component(object):
# How we get any passwords we need
self.passwords = passwords
self.bin_dir = "/usr/bin"
def get_password(self, option):
pw_val = self.passwords.get(option)
if pw_val is None:


@ -13,9 +13,8 @@
# under the License.
from anvil import colorizer
from anvil import decorators
from anvil.components import base
from anvil import downloader as down
from anvil import exceptions as excp
from anvil import importer
from anvil import log as logging
from anvil import patcher
@ -23,15 +22,8 @@ from anvil import shell as sh
from anvil import trace as tr
from anvil import utils
from anvil.packaging import pip
from anvil.packaging.helpers import pip_helper
from anvil.components import base
from anvil.components.configurators import base as conf
import re
LOG = logging.getLogger(__name__)
# Cache of accessed packagers
@ -52,24 +44,157 @@ def make_packager(package, default_class, **kwargs):
return p
# Remove any private keys from a package dictionary
def filter_package(pkg):
n_pkg = {}
for (k, v) in pkg.items():
if not k or k.startswith("_"):
continue
else:
n_pkg[k] = v
return n_pkg
class PkgInstallComponent(base.Component):
def __init__(self, *args, **kargs):
super(PkgInstallComponent, self).__init__(*args, **kargs)
trace_fn = tr.trace_filename(self.get_option('trace_dir'), 'created')
self.tracewriter = tr.TraceWriter(trace_fn, break_if_there=False)
self.configurator = conf.Configurator(self)
class EmptyPackagingComponent(base.Component):
def package(self):
def _get_download_config(self):
return None
def _get_download_location(self):
key = self._get_download_config()
if not key:
return (None, None)
uri = self.get_option(key, default_value='').strip()
if not uri:
raise ValueError(("Could not find uri in config to download "
"from option %s") % (key))
return (uri, self.get_option('app_dir'))
def download(self):
(from_uri, target_dir) = self._get_download_location()
if not from_uri and not target_dir:
return []
else:
uris = [from_uri]
utils.log_iterable(uris, logger=LOG,
header="Downloading from %s uris" % (len(uris)))
sh.mkdirslist(target_dir, tracewriter=self.tracewriter)
# This is used to delete what is downloaded (done before
# fetching to ensure its cleaned up even on download failures)
self.tracewriter.download_happened(target_dir, from_uri)
fetcher = down.GitDownloader(self.distro, from_uri, target_dir)
fetcher.download()
return uris
def patch(self, section):
what_patches = self.get_option('patches', section)
(_from_uri, target_dir) = self._get_download_location()
if not what_patches:
what_patches = []
canon_what_patches = []
for path in what_patches:
if sh.isdir(path):
canon_what_patches.extend(sorted(sh.listdir(path, files_only=True)))
elif sh.isfile(path):
canon_what_patches.append(path)
if canon_what_patches:
patcher.apply_patches(canon_what_patches, target_dir)
def config_params(self, config_fn):
mp = dict(self.params)
if config_fn:
mp['CONFIG_FN'] = config_fn
return mp
@property
def packages(self):
return self.extended_packages()
def extended_packages(self):
pkg_list = self.get_option('packages', default_value=[])
if not pkg_list:
pkg_list = []
for name, values in self.subsystems.items():
if 'packages' in values:
LOG.debug("Extending package list with packages for subsystem: %r", name)
pkg_list.extend(values.get('packages'))
return pkg_list
def pre_install(self):
pkgs = self.packages
for p in pkgs:
installer = make_packager(p, self.distro.package_manager_class,
distro=self.distro)
installer.pre_install(p, self.params)
def post_install(self):
pkgs = self.packages
for p in pkgs:
installer = make_packager(p, self.distro.package_manager_class,
distro=self.distro)
installer.post_install(p, self.params)
def _configure_files(self):
config_fns = self.configurator.config_files
if config_fns:
utils.log_iterable(config_fns, logger=LOG,
header="Configuring %s files" % (len(config_fns)))
for fn in config_fns:
tgt_fn = self.configurator.target_config(fn)
sh.mkdirslist(sh.dirname(tgt_fn), tracewriter=self.tracewriter)
(source_fn, contents) = self.configurator.source_config(fn)
LOG.debug("Configuring file %s ---> %s.", (source_fn), (tgt_fn))
contents = self.configurator.config_param_replace(fn, contents, self.config_params(fn))
contents = self.configurator.config_adjust(contents, fn)
sh.write_file(tgt_fn, contents, tracewriter=self.tracewriter)
return len(config_fns)
def _configure_symlinks(self):
links = self.configurator.symlinks
if not links:
return 0
# This sort happens so that we link in the correct order
# although it might not matter. Either way. We ensure that the right
# order happens. Ie /etc/blah link runs before /etc/blah/blah
link_srcs = sorted(links.keys())
link_srcs.reverse()
link_nice = []
for source in link_srcs:
links_to_be = links[source]
for link in links_to_be:
link_nice.append("%s => %s" % (link, source))
utils.log_iterable(link_nice, logger=LOG,
header="Creating %s sym-links" % (len(link_nice)))
links_made = 0
for source in link_srcs:
links_to_be = links[source]
for link in links_to_be:
try:
LOG.debug("Symlinking %s to %s.", link, source)
sh.symlink(source, link, tracewriter=self.tracewriter)
links_made += 1
except (IOError, OSError) as e:
LOG.warn("Symlinking %s to %s failed: %s", colorizer.quote(link), colorizer.quote(source), e)
return links_made
def prepare(self):
pass
def configure(self):
return self._configure_files() + self._configure_symlinks()
class PythonInstallComponent(PkgInstallComponent):
def __init__(self, *args, **kargs):
PkgInstallComponent.__init__(self, *args, **kargs)
tools_dir = sh.joinpths(self.get_option('app_dir'), 'tools')
self.requires_files = [
sh.joinpths(tools_dir, 'pip-requires'),
]
if self.get_bool_option('use_tests_requires', default_value=True):
self.requires_files.append(sh.joinpths(tools_dir, 'test-requires'))
def _get_download_config(self):
return 'get_from'
class PkgUninstallComponent(base.Component):
def __init__(self, *args, **kargs):
base.Component.__init__(self, *args, **kargs)
super(PkgUninstallComponent, self).__init__(*args, **kargs)
trace_fn = tr.trace_filename(self.get_option('trace_dir'), 'created')
self.tracereader = tr.TraceReader(trace_fn)
self.purge_packages = kargs.get('purge_packages')
@ -135,33 +260,8 @@ class PythonUninstallComponent(PkgUninstallComponent):
def uninstall(self):
self._uninstall_python()
self._uninstall_pips()
PkgUninstallComponent.uninstall(self)
def _uninstall_pips(self):
pips = self.tracereader.pips_installed()
if pips:
pip_names = set([p['name'] for p in pips])
utils.log_iterable(pip_names, logger=LOG,
header="Potentially removing %s python packages" % (len(pip_names)))
which_removed = []
with utils.progress_bar('Uninstalling', len(pips), reverse=True) as p_bar:
for (i, p) in enumerate(pips):
try:
uninstaller = make_packager(p, pip.Packager,
distro=self.distro,
remove_default=self.purge_packages)
if uninstaller.remove(p):
which_removed.append(p['name'])
except excp.ProcessExecutionError as e:
# NOTE(harlowja): pip seems to die if a pkg isn't there even in quiet mode
combined = (str(e.stderr) + str(e.stdout))
if not re.search(r"not\s+installed", combined, re.I):
raise
p_bar.update(i + 1)
utils.log_iterable(which_removed, logger=LOG,
header="Actually removed %s python packages" % (len(which_removed)))
def _uninstall_python(self):
py_listing = self.tracereader.py_listing()
if py_listing:
@ -176,451 +276,3 @@ class PythonUninstallComponent(PkgUninstallComponent):
sh.execute(*unsetup_cmd, cwd=where, run_as_root=True)
else:
LOG.warn("No python directory found at %s - skipping", colorizer.quote(where, quote_color='red'))
class PkgInstallComponent(base.Component):
def __init__(self, *args, **kargs):
base.Component.__init__(self, *args, **kargs)
trace_fn = tr.trace_filename(self.get_option('trace_dir'), 'created')
self.tracewriter = tr.TraceWriter(trace_fn, break_if_there=False)
self.configurator = conf.Configurator(self)
def _get_download_config(self):
return None
def _get_download_location(self):
key = self._get_download_config()
if not key:
return (None, None)
uri = self.get_option(key, default_value='').strip()
if not uri:
raise ValueError(("Could not find uri in config to download "
"from option %s") % (key))
return (uri, self.get_option('app_dir'))
def download(self):
(from_uri, target_dir) = self._get_download_location()
if not from_uri and not target_dir:
return []
else:
uris = [from_uri]
utils.log_iterable(uris, logger=LOG,
header="Downloading from %s uris" % (len(uris)))
sh.mkdirslist(target_dir, tracewriter=self.tracewriter)
# This is used to delete what is downloaded (done before
# fetching to ensure its cleaned up even on download failures)
self.tracewriter.download_happened(target_dir, from_uri)
fetcher = down.GitDownloader(self.distro, from_uri, target_dir)
fetcher.download()
return uris
def patch(self, section):
what_patches = self.get_option('patches', section)
(_from_uri, target_dir) = self._get_download_location()
if not what_patches:
what_patches = []
canon_what_patches = []
for path in what_patches:
if sh.isdir(path):
canon_what_patches.extend(sorted(sh.listdir(path, files_only=True)))
elif sh.isfile(path):
canon_what_patches.append(path)
if canon_what_patches:
patcher.apply_patches(canon_what_patches, target_dir)
def config_params(self, config_fn):
mp = dict(self.params)
if config_fn:
mp['CONFIG_FN'] = config_fn
return mp
@property
def packages(self):
pkg_list = self.get_option('packages', default_value=[])
if not pkg_list:
pkg_list = []
for name, values in self.subsystems.items():
if 'packages' in values:
LOG.debug("Extending package list with packages for subsystem: %r", name)
pkg_list.extend(values.get('packages'))
return pkg_list
def install(self):
LOG.debug('Preparing to install packages for: %r', self.name)
pkgs = self.packages
if pkgs:
pkg_names = set([p['name'] for p in pkgs])
utils.log_iterable(pkg_names, logger=LOG,
header="Setting up %s distribution packages" % (len(pkg_names)))
with utils.progress_bar('Installing', len(pkgs)) as p_bar:
for (i, p) in enumerate(pkgs):
installer = make_packager(p, self.distro.package_manager_class,
distro=self.distro)
installer.install(p)
# Mark that this happened so that we can uninstall it
self.tracewriter.package_installed(filter_package(p))
p_bar.update(i + 1)
def pre_install(self):
pkgs = self.packages
for p in pkgs:
installer = make_packager(p, self.distro.package_manager_class,
distro=self.distro)
installer.pre_install(p, self.params)
def post_install(self):
pkgs = self.packages
for p in pkgs:
installer = make_packager(p, self.distro.package_manager_class,
distro=self.distro)
installer.post_install(p, self.params)
def _configure_files(self):
config_fns = self.configurator.config_files
if config_fns:
utils.log_iterable(config_fns, logger=LOG,
header="Configuring %s files" % (len(config_fns)))
for fn in config_fns:
tgt_fn = self.configurator.target_config(fn)
sh.mkdirslist(sh.dirname(tgt_fn), tracewriter=self.tracewriter)
(source_fn, contents) = self.configurator.source_config(fn)
LOG.debug("Configuring file %s ---> %s.", (source_fn), (tgt_fn))
contents = self.configurator.config_param_replace(fn, contents, self.config_params(fn))
contents = self.configurator.config_adjust(contents, fn)
sh.write_file(tgt_fn, contents, tracewriter=self.tracewriter)
return len(config_fns)
def _configure_symlinks(self):
links = self.configurator.symlinks
if not links:
return 0
# This sort happens so that we link in the correct order
# although it might not matter. Either way. We ensure that the right
# order happens. Ie /etc/blah link runs before /etc/blah/blah
link_srcs = sorted(links.keys())
link_srcs.reverse()
link_nice = []
for source in link_srcs:
links_to_be = links[source]
for link in links_to_be:
link_nice.append("%s => %s" % (link, source))
utils.log_iterable(link_nice, logger=LOG,
header="Creating %s sym-links" % (len(link_nice)))
links_made = 0
for source in link_srcs:
links_to_be = links[source]
for link in links_to_be:
try:
LOG.debug("Symlinking %s to %s.", link, source)
sh.symlink(source, link, tracewriter=self.tracewriter)
links_made += 1
except (IOError, OSError) as e:
LOG.warn("Symlinking %s to %s failed: %s", colorizer.quote(link), colorizer.quote(source), e)
return links_made
def configure(self):
return self._configure_files() + self._configure_symlinks()
class PythonInstallComponent(PkgInstallComponent):
def __init__(self, *args, **kargs):
PkgInstallComponent.__init__(self, *args, **kargs)
self.requires_files = [
sh.joinpths(self.get_option('app_dir'), 'tools', 'pip-requires'),
]
if self.get_bool_option('use_tests_requires', default_value=True):
self.requires_files.append(sh.joinpths(self.get_option('app_dir'), 'tools', 'test-requires'))
def _get_download_config(self):
return 'get_from'
@property
def python_directories(self):
py_dirs = {}
app_dir = self.get_option('app_dir')
if sh.isdir(app_dir):
py_dirs[self.name] = app_dir
return py_dirs
@property
def packages(self):
pkg_list = super(PythonInstallComponent, self).packages
if not pkg_list:
pkg_list = []
pkg_list.extend(self._get_mapped_packages())
return pkg_list
@property
def pips_to_packages(self):
pip_pkg_list = self.get_option('pip_to_package', default_value=[])
if not pip_pkg_list:
pip_pkg_list = []
return pip_pkg_list
@property
def pip_requires(self):
all_pips = []
for fn in self.requires_files:
all_pips.extend(self._extract_pip_requires(fn))
return all_pips
def _match_pip_requires(self, pip_req):
def pip_use(who, there_pip):
if there_pip.key != pip_req.key:
return False
if not len(pip_req.specs):
# No version/restrictions specified
return True
there_version = None
if not there_pip.specs or there_pip == pip_req:
return True
# Different possibly incompat. versions found...
if there_version is None:
# Assume pip will install the correct version anyway
if who != self.name:
msg = ("Component %r asked for package '%s'"
" and '%s' is being selected from %r instead...")
LOG.debug(msg, self.name, pip_req, there_pip, who)
return True
else:
if who != self.name:
msg = ("Component %r provides package '%s'"
" but '%s' is being asked for by %r instead...")
LOG.warn(msg, who, there_pip, pip_req, self.name)
return False
LOG.debug("Attempting to find who satisfies pip requirement '%s'", pip_req)
# Try to find it in anyones pip -> pkg list
all_pip_2_pkgs = {
self.name: self.pips_to_packages,
}
# Gather them all (but only if they activate before me)
# since if they activate after, we can't depend on it
# to satisfy our requirement...
for (name, c) in self.instances.items():
if c is self or not c.activated:
continue
if isinstance(c, (PythonInstallComponent)):
all_pip_2_pkgs[name] = c.pips_to_packages
for (who, pips_2_pkgs) in all_pip_2_pkgs.items():
for pip_info in pips_2_pkgs:
there_pip = pip.extract_requirement(pip_info)
if not pip_use(who, there_pip):
continue
LOG.debug("Matched pip->pkg '%s' from component %r", there_pip, who)
return (dict(pip_info.get('package')), False)
# Ok nobody had it in a pip->pkg mapping
# but see if they had it in there pip collection
all_pips = {
self.name: self._base_pips(), # Use base pips to avoid recursion...
}
for (name, c) in self.instances.items():
if not c.activated or c is self:
continue
if isinstance(c, (PythonInstallComponent)):
all_pips[name] = c._base_pips() # pylint: disable=W0212
for (who, there_pips) in all_pips.items():
for pip_info in there_pips:
there_pip = pip.extract_requirement(pip_info)
if not pip_use(who, there_pip):
continue
LOG.debug("Matched pip '%s' from component %r", there_pip, who)
return (dict(pip_info), True)
# Ok nobody had it in there pip->pkg mapping or pip mapping
# but now lets see if we can automatically find
# a pip->pkg mapping for them using the good ole'
# rpm/yum database.
installer = make_packager({}, self.distro.package_manager_class,
distro=self.distro)
# TODO(harlowja): make this better
if installer and hasattr(installer, 'match_pip_2_package'):
try:
dist_pkg = installer.match_pip_2_package(pip_req)
if dist_pkg:
pkg_info = {
'name': str(dist_pkg.name),
'version': str(dist_pkg.version),
'__requirement': dist_pkg,
}
LOG.debug("Auto-matched (dist) %s -> %s", pip_req, dist_pkg)
return (pkg_info, False)
except excp.DependencyException as e:
LOG.warn("Unable to automatically map pip to package: %s", e)
# Ok still nobody has it, search pypi...
pypi_pkg = pip_helper.find_pypi_match(pip_req)
if pypi_pkg:
pkg_info = {
'name': str(pypi_pkg.key),
'__requirement': pypi_pkg,
}
try:
pkg_info['version'] = pypi_pkg.specs[0][1]
except IndexError:
pass
LOG.debug("Auto-matched (pypi) %s -> %s", pip_req, pypi_pkg)
return (pkg_info, True)
return (None, False)
def _get_mapped_packages(self):
add_on_pkgs = []
all_pips = self.pip_requires
for details in all_pips:
pkg_info = details['package']
from_pip = details['from_pip']
if from_pip or not pkg_info:
continue
# Keep the initial requirement
pkg_info = dict(pkg_info)
pkg_info['__requirement'] = details['requirement']
add_on_pkgs.append(pkg_info)
return add_on_pkgs
def _get_mapped_pips(self):
add_on_pips = []
all_pips = self.pip_requires
for details in all_pips:
pkg_info = details['package']
from_pip = details['from_pip']
if not from_pip or not pkg_info:
continue
# Keep the initial requirement
pkg_info = dict(pkg_info)
pkg_info['__requirement'] = details['requirement']
add_on_pips.append(pkg_info)
return add_on_pips
def _base_pips(self):
pip_list = self.get_option('pips', default_value=[])
if not pip_list:
pip_list = []
for (name, values) in self.subsystems.items():
if 'pips' in values:
LOG.debug("Extending pip list with pips for subsystem: %r" % (name))
pip_list.extend(values.get('pips'))
return pip_list
@property
def pips(self):
pip_list = self._base_pips()
pip_list.extend(self._get_mapped_pips())
return pip_list
def _install_pips(self):
pips = self.pips
if pips:
pip_names = set([p['name'] for p in pips])
utils.log_iterable(pip_names, logger=LOG,
header="Setting up %s python packages" % (len(pip_names)))
with utils.progress_bar('Installing', len(pips)) as p_bar:
for (i, p) in enumerate(pips):
installer = make_packager(p, pip.Packager,
distro=self.distro)
installer.install(p)
# Note that we did it so that we can remove it...
self.tracewriter.pip_installed(filter_package(p))
p_bar.update(i + 1)
def _clean_pip_requires(self):
# Fixup these files if they exist, sometimes they have 'junk' in them
# that anvil will install instead of pip or setup.py and we don't want
# the setup.py file to attempt to install said dependencies since it
# typically picks locations that either are not what we desire or if
# said file contains editables, it may even pick external source directories
# which is what anvil is setting up as well...
req_fns = [f for f in self.requires_files if sh.isfile(f)]
if req_fns:
utils.log_iterable(req_fns, logger=LOG,
header="Adjusting %s pip 'requires' files" % (len(req_fns)))
for fn in req_fns:
old_lines = sh.load_file(fn).splitlines()
new_lines = self._filter_pip_requires(fn, old_lines)
contents = "# Cleaned on %s\n\n%s\n" % (utils.iso8601(), "\n".join(new_lines))
sh.write_file_and_backup(fn, contents)
return len(req_fns)
def _filter_pip_requires(self, fn, lines):
# The default does no filtering except to ensure that said lines are valid...
return lines
def pre_install(self):
self._verify_pip_requires()
PkgInstallComponent.pre_install(self)
for p in self.pips:
installer = make_packager(p, pip.Packager,
distro=self.distro)
installer.pre_install(p, self.params)
def post_install(self):
PkgInstallComponent.post_install(self)
for p in self.pips:
installer = make_packager(p, pip.Packager,
distro=self.distro)
installer.post_install(p, self.params)
def _install_python_setups(self):
py_dirs = self.python_directories
if py_dirs:
real_dirs = {}
for (name, wkdir) in py_dirs.items():
real_dirs[name] = wkdir
if not real_dirs[name]:
real_dirs[name] = self.get_option('app_dir')
utils.log_iterable(real_dirs.values(), logger=LOG,
header="Setting up %s python directories" % (len(real_dirs)))
setup_cmd = self.distro.get_command('python', 'setup')
for (name, working_dir) in real_dirs.items():
sh.mkdirslist(working_dir, tracewriter=self.tracewriter)
setup_fn = sh.joinpths(self.get_option('trace_dir'), "%s.python.setup" % (name))
sh.execute(*setup_cmd, cwd=working_dir, run_as_root=True,
stderr_fn='%s.stderr' % (setup_fn),
stdout_fn='%s.stdout' % (setup_fn),
tracewriter=self.tracewriter)
self.tracewriter.py_installed(name, working_dir)
def _python_install(self):
self._install_pips()
self._install_python_setups()
@decorators.memoized
def _extract_pip_requires(self, fn):
if not sh.isfile(fn):
return []
LOG.debug("Resolving dependencies from %s.", colorizer.quote(fn))
pips_needed = pip_helper.parse_requirements(sh.load_file(fn))
matchings = []
for req in pips_needed:
(pkg_info, from_pip) = self._match_pip_requires(req)
matchings.append({
'requirement': req,
'package': pkg_info,
'from_pip': from_pip,
'needed_by': fn,
})
return matchings
def _verify_pip_requires(self):
all_pips = self.pip_requires
for details in all_pips:
req = details['requirement']
needed_by = details['needed_by']
pkg_info = details['package']
if not pkg_info:
raise excp.DependencyException(("Pip dependency '%s' needed by '%s' is not translatable to a listed"
" (from this or previously activated components) pip package"
' or a pip->package mapping!') % (req, needed_by))
def install(self):
PkgInstallComponent.install(self)
self._python_install()
def configure(self):
configured_am = PkgInstallComponent.configure(self)
configured_am += self._clean_pip_requires()
return configured_am
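
As a standalone illustration of the aggregation that extended_packages() performs above, with hypothetical option and subsystem values (not taken from any real persona):

# Mirrors the loop in extended_packages(); data is made up for the example.
component_packages = [{"name": "libxml2-devel"}]
subsystems = {
    "api": {"packages": [{"name": "httpd"}]},
    "volume": {},  # a subsystem that adds no packages
}

pkg_list = list(component_packages)
for name, values in subsystems.items():
    if "packages" in values:
        pkg_list.extend(values.get("packages"))

print([p["name"] for p in pkg_list])  # ['libxml2-devel', 'httpd']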


@ -31,18 +31,10 @@ SYNC_DB_CMD = [sh.joinpths('$BIN_DIR', 'cinder-manage'),
# Available commands:
'db', 'sync']
BIN_DIR = 'bin'
class CinderUninstaller(binstall.PythonUninstallComponent):
def __init__(self, *args, **kargs):
binstall.PythonUninstallComponent.__init__(self, *args, **kargs)
self.bin_dir = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
class CinderInstaller(binstall.PythonInstallComponent):
def __init__(self, *args, **kargs):
binstall.PythonInstallComponent.__init__(self, *args, **kargs)
self.bin_dir = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
self.configurator = cconf.CinderConfigurator(self)
def post_install(self):
@ -51,13 +43,6 @@ class CinderInstaller(binstall.PythonInstallComponent):
self.configurator.setup_db()
self._sync_db()
def _filter_pip_requires(self, fn, lines):
return [l for l in lines
# Take out entries that aren't really always needed or are
# resolved/installed by anvil during installation in the first
# place..
if not utils.has_any(l.lower(), 'oslo.config')]
def _sync_db(self):
LOG.info("Syncing cinder to database: %s", colorizer.quote(self.configurator.DB_NAME))
cmds = [{'cmd': SYNC_DB_CMD, 'run_as_root': True}]
@ -65,14 +50,13 @@ class CinderInstaller(binstall.PythonInstallComponent):
def config_params(self, config_fn):
mp = binstall.PythonInstallComponent.config_params(self, config_fn)
mp['BIN_DIR'] = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
mp['BIN_DIR'] = self.bin_dir
return mp
class CinderRuntime(bruntime.PythonRuntime):
def __init__(self, *args, **kargs):
bruntime.PythonRuntime.__init__(self, *args, **kargs)
self.bin_dir = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
self.config_path = sh.joinpths(self.get_option('cfg_dir'), cconf.API_CONF)
@property


@ -42,26 +42,11 @@ SYNC_DB_CMD = [sh.joinpths('$BIN_DIR', 'glance-manage'),
BIN_DIR = '/usr/bin/'
class GlanceUninstaller(binstall.PythonUninstallComponent):
def __init__(self, *args, **kargs):
binstall.PythonUninstallComponent.__init__(self, *args, **kargs)
self.bin_dir = BIN_DIR
class GlanceInstaller(binstall.PythonInstallComponent):
def __init__(self, *args, **kargs):
binstall.PythonInstallComponent.__init__(self, *args, **kargs)
self.bin_dir = BIN_DIR
self.configurator = gconf.GlanceConfigurator(self)
def _filter_pip_requires(self, fn, lines):
return [l for l in lines
# Take out entries that aren't really always needed or are
# resolved/installed by anvil during installation in the first
# place..
if not utils.has_any(l.lower(), 'swift', 'keystoneclient',
'oslo.config')]
def post_install(self):
binstall.PythonInstallComponent.post_install(self)
if self.get_bool_option('db-sync'):
@ -89,10 +74,6 @@ class GlanceInstaller(binstall.PythonInstallComponent):
class GlanceRuntime(bruntime.PythonRuntime):
def __init__(self, *args, **kargs):
bruntime.PythonRuntime.__init__(self, *args, **kargs)
self.bin_dir = BIN_DIR
@property
def applications(self):
apps = []


@ -14,12 +14,8 @@
# License for the specific language governing permissions and limitations
# under the License.
from anvil.components import base_install as binstall
from anvil.components import base_testing as btesting
class GlanceClientInstaller(binstall.PythonInstallComponent):
pass
class GlanceClientTester(btesting.PythonTestingComponent):
def _use_run_tests(self):


@ -26,7 +26,6 @@ from anvil.components.configurators import horizon as hconf
import binascii
import os
import re
LOG = logging.getLogger(__name__)
@ -56,12 +55,6 @@ class HorizonInstaller(binstall.PythonInstallComponent):
'horizon_error.log')
self.configurator = hconf.HorizonConfigurator(self)
def _filter_pip_requires(self, fn, lines):
# Knock off all nova, quantum, swift, keystone, cinder
# clients since anvil will be making sure those are installed
# instead of asking setup.py to do it...
return [l for l in lines if not re.search(r'([n|q|s|k|g|c]\w+client)', l, re.I)]
def verify(self):
binstall.PythonInstallComponent.verify(self)
self._check_ug()


@ -46,26 +46,12 @@ MANAGE_CMD = [sh.joinpths('$BIN_DIR', 'keystone-manage'),
'--config-file=$CONFIG_FILE',
'--debug', '-v']
class KeystoneUninstaller(binstall.PythonUninstallComponent):
def __init__(self, *args, **kargs):
binstall.PythonUninstallComponent.__init__(self, *args, **kargs)
class KeystoneInstaller(binstall.PythonInstallComponent):
def __init__(self, *args, **kargs):
binstall.PythonInstallComponent.__init__(self, *args, **kargs)
self.bin_dir = sh.joinpths(self.get_option('app_dir'), 'bin')
self.configurator = kconf.KeystoneConfigurator(self)
def _filter_pip_requires(self, fn, lines):
return [l for l in lines
# Take out entries that aren't really always needed or are
# resolved/installed by anvil during installation in the first
# place..
if not utils.has_any(l.lower(), 'keystoneclient', 'oslo.config',
'ldap', 'http://tarballs.openstack.org',
'memcached')]
def post_install(self):
binstall.PythonInstallComponent.post_install(self)
if self.get_bool_option('db-sync'):
@ -119,7 +105,6 @@ class KeystoneInstaller(binstall.PythonInstallComponent):
class KeystoneRuntime(bruntime.PythonRuntime):
def __init__(self, *args, **kargs):
bruntime.PythonRuntime.__init__(self, *args, **kargs)
self.bin_dir = sh.joinpths(self.get_option('app_dir'), 'bin')
self.init_fn = sh.joinpths(self.get_option('trace_dir'), INIT_WHAT_HAPPENED)
def _filter_init(self, init_what):

View File

@ -1,27 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from anvil import utils
from anvil.components import base_install as binstall
class KeystoneClientInstaller(binstall.PythonInstallComponent):
def _filter_pip_requires(self, fn, lines):
return [l for l in lines
# Take out entries that aren't really always needed or are
# resolved/installed by anvil during installation in the first
# place..
if not utils.has_any(l.lower(), 'oslo.config')]


@ -60,9 +60,6 @@ FLOATING_NET_CMDS = [
},
]
# Subdirs of the checkout/download
BIN_DIR = 'bin'
class NovaUninstaller(binstall.PythonUninstallComponent):
def __init__(self, *args, **kargs):
@ -97,15 +94,6 @@ class NovaInstaller(binstall.PythonInstallComponent):
binstall.PythonInstallComponent.__init__(self, *args, **kargs)
self.configurator = nconf.NovaConfigurator(self)
def _filter_pip_requires(self, fn, lines):
return [l for l in lines
# Take out entries that aren't really always needed or are
# resolved/installed by anvil during installation in the first
# place..
if not utils.has_any(l.lower(), 'quantumclient',
'cinder', 'glance', 'ldap', 'oslo.config',
'keystoneclient')]
@property
def env_exports(self):
to_set = utils.OrderedDict()
@ -154,7 +142,7 @@ class NovaInstaller(binstall.PythonInstallComponent):
def config_params(self, config_fn):
mp = binstall.PythonInstallComponent.config_params(self, config_fn)
mp['CFG_FILE'] = sh.joinpths(self.get_option('cfg_dir'), nconf.API_CONF)
mp['BIN_DIR'] = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
mp['BIN_DIR'] = self.bin_dir
return mp
@ -164,7 +152,6 @@ class NovaRuntime(bruntime.PythonRuntime):
self.wait_time = self.get_int_option('service_wait_seconds')
self.virsh = lv.Virsh(self.wait_time, self.distro)
self.config_path = sh.joinpths(self.get_option('cfg_dir'), nconf.API_CONF)
self.bin_dir = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
self.net_init_fn = sh.joinpths(self.get_option('trace_dir'), NET_INITED_FN)
def _do_network_init(self):


@ -16,7 +16,6 @@
from anvil import shell as sh
from anvil.components import base_install as binstall
from anvil.components import base_runtime as bruntime
# Where the application is really
@ -25,17 +24,6 @@ UTIL_DIR = 'utils'
VNC_PROXY_APP = 'nova-novncproxy'
class NoVNCUninstaller(binstall.PythonUninstallComponent):
pass
class NoVNCInstaller(binstall.PythonInstallComponent):
@property
def python_directories(self):
# Its python but not one that we need to run setup.py in...
return {}
class NoVNCRuntime(bruntime.PythonRuntime):
@property
def applications(self):


@ -14,17 +14,8 @@
# License for the specific language governing permissions and limitations
# under the License.
from anvil import utils
from anvil.components import base_install as binstall
from anvil.components import base_testing as btesting
class OpenStackClientInstaller(binstall.PythonInstallComponent):
def _filter_pip_requires(self, fn, lines):
return [l for l in lines
if not utils.has_any(l.lower(),
'keystoneclient', 'novaclient', 'glanceclient')]
class OpenStackClientTester(btesting.PythonTestingComponent):
def _use_run_tests(self):


@ -19,18 +19,6 @@ from anvil.components import base_install as binstall
class Installer(binstall.PythonInstallComponent):
@property
def packages(self):
pkg_list = super(Installer, self).packages
if not pkg_list:
pkg_list = []
# If any pips that have mapped packages, suck them out as well
pips_to_packages = self.pips_to_packages
for pip_to_package in pips_to_packages:
if 'package' in pip_to_package:
pkg_list.append(pip_to_package['package'])
return pkg_list
def _get_python_directories(self):
return {}


@ -17,7 +17,6 @@
from anvil import colorizer
from anvil import log as logging
from anvil import shell as sh
from anvil import utils
from anvil.components import base_install as binstall
from anvil.components import base_runtime as bruntime
@ -30,18 +29,10 @@ LOG = logging.getLogger(__name__)
SYNC_DB_CMD = [sh.joinpths("$BIN_DIR", "quantum-db-manage"),
"sync"]
BIN_DIR = "bin"
class QuantumUninstaller(binstall.PythonUninstallComponent):
def __init__(self, *args, **kargs):
super(QuantumUninstaller, self).__init__(*args, **kargs)
self.bin_dir = sh.joinpths(self.get_option("app_dir"), BIN_DIR)
class QuantumInstaller(binstall.PythonInstallComponent):
def __init__(self, *args, **kargs):
super(QuantumInstaller, self).__init__(*args, **kargs)
self.bin_dir = sh.joinpths(self.get_option("app_dir"), BIN_DIR)
self.configurator = qconf.QuantumConfigurator(self)
def post_install(self):
@ -50,13 +41,6 @@ class QuantumInstaller(binstall.PythonInstallComponent):
self.configurator.setup_db()
self._sync_db()
def _filter_pip_requires(self, fn, lines):
# Take out entries that aren't really always needed or are
# resolved/installed by anvil during installation in the first
# place..
return [l for l in lines
if not utils.has_any(l.lower(), "oslo.config")]
def _sync_db(self):
LOG.info("Syncing quantum to database: %s", colorizer.quote(self.configurator.DB_NAME))
#cmds = [{"cmd": SYNC_DB_CMD, "run_as_root": True}]
@ -77,8 +61,6 @@ class QuantumRuntime(bruntime.PythonRuntime):
def __init__(self, *args, **kargs):
super(QuantumRuntime, self).__init__(*args, **kargs)
# TODO(aababilov): move to base class
self.bin_dir = sh.joinpths(self.get_option("app_dir"), BIN_DIR)
self.config_path = sh.joinpths(self.get_option("cfg_dir"), qconf.API_CONF)
# TODO(aababilov): move to base class


@ -1,25 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from anvil import utils
from anvil.components import base_install as binstall
class SwiftClientInstaller(binstall.PythonInstallComponent):
def _filter_pip_requires(self, fn, lines):
return [l for l in lines
if not utils.has_any(l.lower(), 'keystoneclient')]


@ -37,10 +37,14 @@ Component = collections.namedtuple( # pylint: disable=C0103
class Distro(object):
def __init__(self, name, platform_pattern, packager_name, commands, components):
def __init__(self,
name, platform_pattern,
packager_name, dependency_handler,
commands, components):
self.name = name
self._platform_pattern = re.compile(platform_pattern, re.IGNORECASE)
self._packager_name = packager_name
self._dependency_handler = dependency_handler
self._commands = commands
self._components = components
@ -90,6 +94,11 @@ class Distro(object):
"""Return a package manager that will work for this distro."""
return importer.import_entry_point(self._packager_name)
@property
def dependency_handler_class(self):
"""Return a dependency handler that will work for this distro."""
return importer.import_entry_point(self._dependency_handler)
def extract_component(self, name, action):
"""Return the class + component info to use for doing the action w/the component."""
try:
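
A minimal sketch of the new wiring, assuming the distro definition supplies the handler as an entry-point string (the values and variable names below are placeholders, not taken from this commit):

# dependency_handler_class resolves the string through
# importer.import_entry_point(), just like packager_name.
d = Distro(
    name="rhel",
    platform_pattern=r"redhat|centos",
    packager_name="anvil.packaging.yum:YumPackager",                # placeholder
    dependency_handler="anvil.packaging.yum:YumDependencyHandler",  # placeholder
    commands={},
    components={},
)
handler = d.dependency_handler_class(d, "/opt/anvil", component_instances)
handler.package()   # what the prepare action calls
handler.install()   # what the install action calls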


@ -63,40 +63,40 @@ class GitDownloader(Downloader):
uri = uri.strip()
if not branch:
branch = 'master'
if sh.isdir(self.store_where) and sh.isdir(sh.joinpths(self.store_where, '.git')):
LOG.info("Existing git directory located at %s, leaving it alone.", colorizer.quote(self.store_where))
# do git clean -xdfq and git reset --hard to undo possible changes
cmd = list(self.distro.get_command("git", "clean")) + ["-xdfq"]
sh.execute(*cmd, cwd=self.store_where, run_as_root=True)
cmd = list(self.distro.get_command("git", "reset")) + ["--hard"]
sh.execute(*cmd, cwd=self.store_where)
else:
LOG.info("Downloading %s (%s) to %s.", colorizer.quote(uri), branch, colorizer.quote(self.store_where))
cmd = list(self.distro.get_command('git', 'clone'))
cmd += [uri, self.store_where]
sh.execute(*cmd)
if branch or tag:
if tag:
# Avoid 'detached HEAD state' message by moving to a
# $tag-anvil branch for that tag
new_branch = "%s-%s" % (tag, 'anvil')
checkout_what = [tag, '-b', new_branch]
LOG.info("Adjusting to tag %s.", colorizer.quote(tag))
else:
# Set it up to track the remote branch correctly
new_branch = branch
checkout_what = ['-t', '-b', new_branch, 'origin/%s' % branch]
LOG.info("Adjusting branch to %s.", colorizer.quote(branch))
git_checkout = list(self.distro.get_command('git', 'checkout'))
git_branch = list(self.distro.get_command('git', 'branch'))
if sh.isdir(self.store_where) and sh.isdir(sh.joinpths(self.store_where, '.git')):
LOG.info("Existing git directory located at %s, leaving it alone.", colorizer.quote(self.store_where))
# do git clean -xdfq and git reset --hard to undo possible changes
cmd = ["git", "clean", "-xdfq"]
sh.execute(*cmd, cwd=self.store_where)
cmd = ["git", "reset", "--hard"]
sh.execute(*cmd, cwd=self.store_where)
# detach, drop new_branch if it exists, and checkout to new_branch
# newer git allows branch resetting: git checkout -B $new_branch
# so, all these are for compatibility with older RHEL git
cmd = git_checkout + ["--detach"]
cmd = ["git", "rev-parse", "HEAD"]
git_head = sh.execute(*cmd, cwd=self.store_where)[0].strip()
cmd = ["git", "checkout", git_head]
sh.execute(*cmd, cwd=self.store_where)
cmd = ["git", "branch", "-D", new_branch]
sh.execute(*cmd, cwd=self.store_where, ignore_exit_code=True)
cmd = git_branch + ["-D", new_branch]
sh.execute(*cmd, cwd=self.store_where, ignore_exit_code=True)
cmd = git_checkout + checkout_what
else:
LOG.info("Downloading %s (%s) to %s.", colorizer.quote(uri), branch, colorizer.quote(self.store_where))
cmd = ["git", "clone", uri, self.store_where]
sh.execute(*cmd)
if tag:
LOG.info("Adjusting to tag %s.", colorizer.quote(tag))
else:
LOG.info("Adjusting branch to %s.", colorizer.quote(branch))
cmd = ["git", "checkout"] + checkout_what
sh.execute(*cmd, cwd=self.store_where)
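
The comment in the hunk above notes that newer git could do this with a single `git checkout -B <branch> <start-point>`; for clarity, the compatibility sequence the new code runs on older (RHEL) git, condensed into one place (repo_dir stands in for self.store_where; new_branch and checkout_what as in the surrounding code):

head = sh.execute("git", "rev-parse", "HEAD", cwd=repo_dir)[0].strip()
sh.execute("git", "checkout", head, cwd=repo_dir)             # detach HEAD
sh.execute("git", "branch", "-D", new_branch,
           cwd=repo_dir, ignore_exit_code=True)               # drop any stale branch
sh.execute("git", "checkout", *checkout_what, cwd=repo_dir)   # recreate/track it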


@ -1,73 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from anvil import colorizer
from anvil import log as logging
from anvil import type_utils
from anvil import utils
LOG = logging.getLogger(__name__)
class Packager(object):
__meta__ = abc.ABCMeta
def __init__(self, distro, remove_default=False):
self.distro = distro
self.remove_default = remove_default
@abc.abstractmethod
def _anything_there(self, pkg):
raise NotImplementedError()
def install(self, pkg):
installed_already = self._anything_there(pkg)
if not installed_already:
self._install(pkg)
LOG.debug("Installed %s", pkg)
else:
LOG.debug("Skipping install of %r since %s is already there.", pkg['name'], installed_already)
def remove(self, pkg):
should_remove = self.remove_default
if 'removable' in pkg:
should_remove = type_utils.make_bool(pkg['removable'])
if not should_remove:
return False
self._remove(pkg)
return True
def pre_install(self, pkg, params=None):
cmds = pkg.get('pre-install')
if cmds:
LOG.info("Running pre-install commands for package %s.", colorizer.quote(pkg['name']))
utils.execute_template(*cmds, params=params)
def post_install(self, pkg, params=None):
cmds = pkg.get('post-install')
if cmds:
LOG.info("Running post-install commands for package %s.", colorizer.quote(pkg['name']))
utils.execute_template(*cmds, params=params)
@abc.abstractmethod
def _remove(self, pkg):
raise NotImplementedError()
@abc.abstractmethod
def _install(self, pkg):
raise NotImplementedError()


@ -13,3 +13,13 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Package formats and package management systems support.
Supported formats:
- pip
- RPM
Supported systems:
- pip
- YUM
"""

anvil/packaging/base.py (new file, 299 lines)

@ -0,0 +1,299 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# R0921: Abstract class not referenced
#pylint: disable=R0921
import abc
import pkg_resources
from anvil import colorizer
from anvil.components import base as component_base
from anvil import log as logging
from anvil import shell as sh
from anvil import type_utils
from anvil import utils
LOG = logging.getLogger(__name__)
class Packager(object):
"""Basic class for package management systems support.
"""
__meta__ = abc.ABCMeta
def __init__(self, distro, remove_default=False):
self.distro = distro
self.remove_default = remove_default
def remove(self, pkg):
should_remove = self.remove_default
if 'removable' in pkg:
should_remove = type_utils.make_bool(pkg['removable'])
if not should_remove:
return False
self._remove(pkg)
return True
def pre_install(self, pkg, params=None):
cmds = pkg.get('pre-install')
if cmds:
LOG.info("Running pre-install commands for package %s.", colorizer.quote(pkg['name']))
utils.execute_template(*cmds, params=params)
def post_install(self, pkg, params=None):
cmds = pkg.get('post-install')
if cmds:
LOG.info("Running post-install commands for package %s.", colorizer.quote(pkg['name']))
utils.execute_template(*cmds, params=params)
@abc.abstractmethod
def _remove(self, pkg):
pass
@abc.abstractmethod
def _install(self, pkg):
pass
OPENSTACK_PACKAGES = set([
"cinder",
"glance",
"horizon",
"keystone",
"nova",
"oslo.config",
"quantum",
"swift",
"python-cinderclient",
"python-glanceclient",
"python-keystoneclient",
"python-novaclient",
"python-quantumclient",
"python-swiftclient",
])
class DependencyHandler(object):
"""Basic class for handler of OpenStack dependencies.
"""
multipip_executable = sh.which("multipip", ["multipip"])
# Update requirements to make them allow already installed packages
force_frozen = True
def __init__(self, distro, root_dir, instances):
self.distro = distro
self.root_dir = root_dir
self.instances = instances
self.deps_dir = sh.joinpths(self.root_dir, "deps")
self.download_dir = sh.joinpths(self.deps_dir, "download")
self.gathered_requires_filename = sh.joinpths(
self.deps_dir, "pip-requires")
self.forced_requires_filename = sh.joinpths(
self.deps_dir, "forced-requires")
self.pip_executable = str(self.distro.get_command_config('pip'))
self.pips_to_install = []
self.forced_packages = []
# nopips is a list of items that fail to build from Python packages,
# but their RPMs are available from base and epel repos
self.nopips = []
# these packages conflict with our deps and must be removed
self.nopackages = []
self.package_dirs = self._get_package_dirs(instances)
self.python_names = self._get_python_names(self.package_dirs)
@staticmethod
def _get_package_dirs(instances):
package_dirs = []
for inst in instances:
app_dir = inst.get_option("app_dir")
if sh.isfile(sh.joinpths(app_dir, "setup.py")):
package_dirs.append(app_dir)
return package_dirs
@staticmethod
def _get_python_names(package_dirs):
python_names = []
for pkg_dir in package_dirs:
cmdline = ["python", "setup.py", "--name"]
python_names.append(sh.execute(*cmdline, cwd=pkg_dir)[0].
splitlines()[-1].strip())
return python_names
def package(self):
requires_files = []
extra_pips = []
self.nopips = []
for inst in self.instances:
try:
requires_files.extend(inst.requires_files)
except AttributeError:
pass
for pkg in inst.get_option("pips") or []:
extra_pips.append(
"%s%s" % (pkg["name"], pkg.get("version", "")))
for pkg in inst.get_option("nopips") or []:
self.nopips.append(pkg["name"])
requires_files = filter(sh.isfile, requires_files)
self.gather_pips_to_install(requires_files, extra_pips)
self.clean_pip_requires(requires_files)
def install(self):
self.nopackages = []
for inst in self.instances:
for pkg in inst.get_option("nopackages") or []:
self.nopackages.append(pkg["name"])
def clean_pip_requires(self, requires_files):
# Fixup incompatible dependencies
if not (requires_files and self.forced_packages):
return
utils.log_iterable(
sorted(requires_files),
logger=LOG,
header="Adjusting %s pip 'requires' files" %
(len(requires_files)))
forced_by_key = dict((pkg.key, pkg) for pkg in self.forced_packages)
for fn in requires_files:
old_lines = sh.load_file(fn).splitlines()
new_lines = []
for line in old_lines:
try:
req = pkg_resources.Requirement.parse(line)
new_lines.append(str(forced_by_key[req.key]))
except:
# we don't force the package or it has a bad format
new_lines.append(line)
contents = "# Cleaned on %s\n\n%s\n" % (
utils.iso8601(), "\n".join(new_lines))
sh.write_file_and_backup(fn, contents)
def gather_pips_to_install(self, requires_files, extra_pips=None):
"""Analyze requires_files and extra_pips.
Updates `self.forced_packages` and `self.pips_to_install`.
If `self.force_frozen`, update requirements to make them allow already
installed packages.
Writes requirements to `self.gathered_requires_filename`.
"""
extra_pips = extra_pips or []
cmdline = [
self.multipip_executable,
"--skip-requirements-regex",
"python.*client",
"--pip",
self.pip_executable
]
if self.force_frozen:
cmdline.append("--frozen")
cmdline = cmdline + extra_pips + ["-r"] + requires_files
output = sh.execute(*cmdline, ignore_exit_code=True)
conflict_descr = output[1].strip()
forced_keys = set()
if conflict_descr:
for line in conflict_descr.splitlines():
LOG.warning(line)
if line.endswith(": incompatible requirements"):
forced_keys.add(line.split(":", 1)[0].lower())
self.pips_to_install = [
pkg
for pkg in utils.splitlines_not_empty(output[0])
if pkg.lower() not in OPENSTACK_PACKAGES]
sh.write_file(self.gathered_requires_filename,
"\n".join(self.pips_to_install))
if not self.pips_to_install:
LOG.error("No dependencies for OpenStack found."
"Something went wrong. Please check:")
LOG.error("'%s'" % "' '".join(cmdline))
raise RuntimeError("No dependencies for OpenStack found")
utils.log_iterable(sorted(self.pips_to_install),
logger=LOG,
header="Full known Python dependency list")
self.forced_packages = []
for pip in self.pips_to_install:
req = pkg_resources.Requirement.parse(pip)
if req.key in forced_keys:
self.forced_packages.append(req)
sh.write_file(self.forced_requires_filename,
"\n".join(str(req) for req in self.forced_packages))
def download_dependencies(self, ignore_installed=True, clear_cache=False):
"""Download dependencies from `$deps_dir/download-requires`.
:param ignore_installed: do not download already installed packages
:param clear_cache: clear `$deps_dir/cache` dir (pip can work incorrectly
when it has a cache)
"""
cache_dir = sh.joinpths(self.deps_dir, "cache")
if clear_cache:
sh.deldir(cache_dir)
sh.mkdir(self.deps_dir, recurse=True)
download_requires_filename = sh.joinpths(
self.deps_dir, "download-requires")
nopips = self.nopips + self.python_names
if ignore_installed or nopips:
cmdline = [
self.multipip_executable,
"--pip", self.pip_executable,
]
if ignore_installed:
cmdline += [
"--ignore-installed",
]
cmdline.extend(self.pips_to_install)
if nopips:
cmdline.append("--ignore-packages")
cmdline.extend(nopips)
output = sh.execute(*cmdline)
pips_to_download = list(utils.splitlines_not_empty(output[0]))
else:
pips_to_download = self.pips_to_install
sh.write_file(download_requires_filename,
"\n".join(str(req) for req in pips_to_download))
if not pips_to_download:
return []
# NOTE(aababilov): pip has issues with already downloaded files
sh.deldir(self.download_dir)
sh.mkdir(self.download_dir, recurse=True)
cmdline = [
self.pip_executable,
"install",
"--download",
self.download_dir,
"--download-cache",
cache_dir,
"-r",
download_requires_filename,
]
out_filename = sh.joinpths(self.deps_dir, "pip-install-download.out")
utils.log_iterable(sorted(pips_to_download), logger=LOG,
header="Downloading Python dependencies")
LOG.info("You can watch progress in another terminal with")
LOG.info(" tail -f %s" % out_filename)
with open(out_filename, "w") as out:
sh.execute(*cmdline, stdout_fh=out, stderr_fh=out)
return sh.listdir(self.download_dir, files_only=True)
class EmptyPackager(component_base.Component):
def package(self):
return None

View File

@ -16,7 +16,6 @@
import copy
import pkg_resources
import xmlrpclib
from anvil import log as logging
from anvil import shell as sh
@ -58,23 +57,6 @@ def _skip_requirement(line):
return False
def find_pypi_match(req, pypi_url='http://python.org/pypi'):
try:
pypi = xmlrpclib.ServerProxy(pypi_url)
LOG.debug("Searching pypi @ %s for %s", pypi_url, req)
for version in pypi.package_releases(req.key, True):
if version in req:
LOG.debug("Found match in pypi: %s==%s satisfies %s",
req.key, version, req)
return req
else:
LOG.debug("Found potential match: %s==%s doesn't satisfy %s",
req.key, version, req)
except (IOError, xmlrpclib.Fault, xmlrpclib.Error) as e:
LOG.warn("Scanning pypi failed: %s", e)
return None
def parse_requirements(contents, adjust=False):
lines = []
for line in contents.splitlines():

View File

@ -14,9 +14,8 @@
# License for the specific language governing permissions and limitations
# under the License.
from anvil import exceptions as excp
from anvil import log as logging
from anvil import packager as pack
from anvil.packaging import base
from anvil import shell as sh
from anvil.packaging.helpers import pip_helper
@ -33,43 +32,15 @@ def extract_requirement(pkg_info):
pkg_info.get('name', ''), pkg_info.get('version'))
class Packager(pack.Packager):
class Packager(base.Packager):
def __init__(self, distro, remove_default=False):
pack.Packager.__init__(self, distro, remove_default)
super(Packager, self).__init__(distro, remove_default)
self.helper = pip_helper.Helper(distro)
self.upgraded = {}
def _get_pip_command(self):
return self.distro.get_command_config('pip')
def _anything_there(self, pip):
wanted_pip = extract_requirement(pip)
pip_there = self.helper.get_installed(wanted_pip.key)
if not pip_there:
# Nothing installed
return None
# Check if version wanted will work with whats installed
if pip_there.specs[0][1] not in wanted_pip:
is_upgrading = False
for o in ['-U', '--upgrade']:
if o in pip.get('options', []):
is_upgrading = True
if is_upgrading and (wanted_pip.key not in self.upgraded):
# Upgrade should hopefully get that package to the right version....
LOG.warn("Upgrade is occuring for %s, even though %s is installed.",
wanted_pip, pip_there)
# Mark it so that we don't keep on flip-flopping on upgrading this
# package (ie install new, install old, install new....)
self.upgraded[wanted_pip.key] = wanted_pip
return None
else:
msg = ("Pip %s is already installed"
" and it is not compatible with desired"
" pip %s")
msg = msg % (pip_there, wanted_pip)
raise excp.DependencyException(msg)
return pip_there
def _execute_pip(self, cmd):
pip_cmd = self._get_pip_command()
if not isinstance(pip_cmd, (list, tuple)):
@ -82,18 +53,6 @@ class Packager(pack.Packager):
# not consistent anymore so uncache it
self.helper.uncache()
def _install(self, pip):
cmd = ['install'] + PIP_INSTALL_CMD_OPTS
options = pip.get('options')
if options:
if not isinstance(options, (list, tuple, set)):
options = [str(options)]
for opt in options:
cmd.append(str(opt))
install_what = extract_requirement(pip)
cmd.append(str(install_what))
self._execute_pip(cmd)
def _remove(self, pip):
# Versions don't seem to matter here...
remove_what = extract_requirement(pip)

View File

@ -1,324 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from anvil import colorizer
from anvil import exceptions as excp
from anvil import log as logging
from anvil import patcher
from anvil import shell as sh
from anvil import trace as tr
from anvil import type_utils as tu
from anvil import utils
from anvil.components import base as comp
from anvil.packaging.helpers import changelog
from anvil.packaging.helpers import yum_helper
LOG = logging.getLogger(__name__)
RPM_DIR_NAMES = ['sources', 'specs', 'srpms', 'rpms', 'build']
class DependencyPackager(comp.Component):
def __init__(self, *args, **kwargs):
comp.Component.__init__(self, *args, **kwargs)
self.tracewriter = tr.TraceWriter(tr.trace_filename(self.get_option('trace_dir'), 'created'),
break_if_there=False)
self.package_dir = sh.joinpths(self.get_option('component_dir'), 'package')
self.match_installed = tu.make_bool(kwargs.get('match_installed'))
self._build_paths = None
self._details = None
self._helper = yum_helper.Helper()
@property
def build_paths(self):
if self._build_paths is None:
build_paths = {}
for name in RPM_DIR_NAMES:
final_path = sh.joinpths(self.package_dir, name.upper())
build_paths[name] = final_path
if sh.isdir(final_path):
sh.deldir(final_path, True)
sh.mkdirslist(final_path, tracewriter=self.tracewriter)
self._build_paths = build_paths
return copy.deepcopy(self._build_paths) # Return copy (not the same instance)
def _patches(self):
in_patches = patcher.expand_patches(self.get_option('patches', 'package'))
your_patches = []
for path in in_patches:
target_path = sh.joinpths(self.build_paths['sources'], sh.basename(path))
sh.copy(path, target_path)
your_patches.append(sh.basename(target_path))
return your_patches
def _requirements(self):
return {
'install': self._install_requirements(),
'build': self._build_requirements(),
}
def _match_version_installed(self, yum_pkg):
if not self.match_installed:
return yum_pkg
installed_pkgs = self._helper.get_installed(yum_pkg['name'])
if not len(installed_pkgs):
return yum_pkg
installed_pkg = installed_pkgs[0]
# Send back a modified copy with the installed version
yum_pkg = copy.deepcopy(yum_pkg)
yum_pkg['version'] = str(installed_pkg.printVer())
return yum_pkg
def _get_packager(self):
return "%s <%s@%s>" % (sh.getuser(), sh.getuser(), sh.hostname())
def _get_summary(self):
return 'Package build of %s on %s' % (self.name, utils.iso8601())
@property
def details(self):
if self._details is not None:
return self._details
self._details = {
'name': self.get_option("rpm_package_name", default_value=self.name),
'version': 0,
'release': self.get_option('release', default_value=1),
'packager': self._get_packager(),
'changelog': '',
'license': 'Apache License, Version 2.0',
'automatic_dependencies': True,
'vendor': None,
'url': '',
'description': '',
'summary': self._get_summary(),
}
return self._details
def _build_details(self):
return {
'arch': 'noarch',
}
def _gather_files(self):
source_fn = self._make_source_archive()
sources = []
if source_fn:
sources.append(source_fn)
return {
'sources': sources,
'files': [],
'directories': [],
'docs': [],
}
def _defines(self):
define_what = []
define_what.append("_topdir %s" % (self.package_dir))
return define_what
def _undefines(self):
undefine_what = []
return undefine_what
def _make_source_archive(self):
return None
def _make_fn(self, ext):
your_fn = "%s-%s-%s.%s" % (self.details['name'],
self.details['version'],
self.details['release'], ext)
return your_fn
def _obsoletes(self):
return []
def _conflicts(self):
return []
def _create_package(self):
files = self._gather_files()
params = {
'files': files,
'requires': self._requirements(),
'obsoletes': self._obsoletes(),
'conflicts': self._conflicts(),
'defines': self._defines(),
'undefines': self._undefines(),
'build': self._build_details(),
'who': sh.getuser(),
'date': utils.iso8601(),
'patches': self._patches(),
'details': self.details,
}
(_fn, content) = utils.load_template('packaging', 'spec.tmpl')
spec_base = self._make_fn("spec")
spec_fn = sh.joinpths(self.build_paths['specs'], spec_base)
LOG.debug("Creating spec file %s with params:", spec_fn)
files['sources'].append("%s.tar.gz" % (spec_base))
utils.log_object(params, logger=LOG, level=logging.DEBUG)
sh.write_file(spec_fn, utils.expand_template(content, params))
tar_it(sh.joinpths(self.build_paths['sources'], "%s.tar.gz" % (spec_base)),
spec_base, wkdir=self.build_paths['specs'])
def _build_requirements(self):
return []
def _install_requirements(self):
i_sibling = self.siblings.get('install')
if not i_sibling:
return []
requirements = []
for p in i_sibling.packages:
p = self._match_version_installed(p)
if 'version' in p:
requirements.append("%s = %s" % (p['name'], p['version']))
else:
requirements.append("%s" % (p['name']))
return requirements
def package(self):
self._create_package()
return self.package_dir
class PythonPackager(DependencyPackager):
def __init__(self, *args, **kargs):
DependencyPackager.__init__(self, *args, **kargs)
self._extended_details = None
self._setup_fn = sh.joinpths(self.get_option('app_dir'), 'setup.py')
def _build_requirements(self):
return [
'python',
'python-devel',
# Often used for building c python modules, should not be harmful...
'gcc',
'python-setuptools',
]
def _build_changelog(self):
try:
ch = changelog.RpmChangeLog(self.get_option('app_dir'))
return ch.format_log()
except (excp.AnvilException, IOError):
return ''
def _undefines(self):
undefine_what = DependencyPackager._undefines(self)
if self.get_bool_option('ignore-missing'):
undefine_what.append('__check_files')
return undefine_what
def _gather_files(self):
files = DependencyPackager._gather_files(self)
files['directories'].append("%{python_sitelib}/")
files['files'].append("%{python_sitelib}/")
if not self.get_option('remove_package_bindir'):
files['files'].append("%{_bindir}/")
return files
def _build_details(self):
# See: http://www.rpm.org/max-rpm/s1-rpm-inside-macros.html
b_dets = DependencyPackager._build_details(self)
b_dets['setup'] = '-q -n %{name}-%{version}'
b_dets['action'] = '%{__python} setup.py build'
b_dets['install_how'] = '%{__python} setup.py install --prefix=%{_prefix} --root=%{buildroot}'
b_dets['remove_file'] = self.get_option('remove_file')
return b_dets
def verify(self):
if not sh.isfile(self._setup_fn):
raise excp.PackageException(("Can not package %s since python"
" setup file at %s is missing") % (self.name, self._setup_fn))
def _make_source_archive(self):
with utils.tempdir() as td:
arch_base_name = "%s-%s" % (self.details['name'], self.details['version'])
sh.copytree(self.get_option('app_dir'), sh.joinpths(td, arch_base_name))
arch_tmp_fn = sh.joinpths(td, "%s.tar.gz" % (arch_base_name))
tar_it(arch_tmp_fn, arch_base_name, td)
sh.move(arch_tmp_fn, self.build_paths['sources'])
return "%s.tar.gz" % (arch_base_name)
def _description(self):
describe_cmd = ['python', self._setup_fn, '--description']
(stdout, _stderr) = sh.execute(*describe_cmd, run_as_root=True, cwd=self.get_option('app_dir'))
stdout = stdout.strip()
if stdout:
# RPM apparently rejects descriptions with blank lines (even between content)
descr_lines = []
for line in stdout.splitlines():
sline = line.strip()
if not sline:
continue
else:
descr_lines.append(line)
return descr_lines
return []
@property
def details(self):
base = super(PythonPackager, self).details
if self._extended_details is None:
ext_dets = {
'automatic_dependencies': False,
}
setup_cmd = ['python', self._setup_fn]
replacements = {
'version': '--version',
'license': '--license',
'vendor': '--author',
'url': '--url',
}
# only replace name if it isn't set in the component config file
if not self.get_option("rpm_package_name"):
replacements['name'] = '--name'
for (key, opt) in replacements.items():
cmd = setup_cmd + [opt]
(stdout, _stderr) = sh.execute(*cmd, run_as_root=True, cwd=self.get_option('app_dir'))
stdout = stdout.strip()
if stdout:
ext_dets[key] = stdout
description = self._description()
if description:
ext_dets['description'] = "\n".join(description)
ext_dets['summary'] = utils.truncate_text("\n".join(description[0:1]), 50)
ext_dets['changelog'] = self._build_changelog()
self._extended_details = ext_dets
extended_dets = dict(base)
extended_dets.update(self._extended_details)
return extended_dets
def package(self):
i_sibling = self.siblings.get('install')
pips = []
if i_sibling:
pips.extend(i_sibling.pips)
if pips:
for pip_info in pips:
LOG.warn("Unable to package pip %s dependency in an rpm.", colorizer.quote(pip_info['name']))
return DependencyPackager.package(self)
def tar_it(to_where, what, wkdir):
tar_cmd = ['tar', '-cvzf', to_where, what]
return sh.execute(*tar_cmd, cwd=wkdir)

View File

@ -14,10 +14,14 @@
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import sys
from anvil import exceptions as excp
from anvil import log as logging
from anvil import packager as pack
from anvil.packaging import base
from anvil import shell as sh
from anvil import utils
from anvil.packaging.helpers import yum_helper
@ -28,6 +32,7 @@ YUM_INSTALL = ["install", "-y", "-t"]
YUM_REMOVE = ['erase', '-y', "-t"]
# TODO(aababilov): use it in `Requires:' at YumDependencyHandler
def extract_requirement(pkg_info):
p_name = pkg_info.get('name', '')
p_name = p_name.strip()
@ -46,131 +51,11 @@ class MultiplePackageSolutions(excp.DependencyException):
pass
class YumPackager(pack.Packager):
class YumPackager(base.Packager):
def __init__(self, distro, remove_default=False):
pack.Packager.__init__(self, distro, remove_default)
super(YumPackager, self).__init__(distro, remove_default)
self.helper = yum_helper.Helper()
def _anything_there(self, pkg):
req = extract_requirement(pkg)
whats_installed = self.helper.get_installed(req.name)
if len(whats_installed) == 0:
return None
# Check if whats installed will work, and if it won't
# then hopefully whats being installed will and
# something later doesn't come by and change it...
for p in whats_installed:
if p.verGE(req.package):
return p
# Warn that incompat. versions could be installed...
LOG.warn("There was %s matches to %s found, none satisified our request!",
len(whats_installed), req)
return None
def match_pip_2_package(self, pip_requirement):
possible_pkgs = self._match_pip_name(pip_requirement)
if not possible_pkgs:
return None
def match_version(yum_pkg):
version = str(yum_pkg.version)
if version in pip_requirement:
return True
return False
satisfying_packages = [p for p in possible_pkgs if match_version(p)]
if not satisfying_packages:
return None
# Remove packages with same name and leave the newest there...
non_same_versions_packages = {}
for p in satisfying_packages:
if p.name not in non_same_versions_packages:
non_same_versions_packages[p.name] = [p]
else:
non_same_versions_packages[p.name].append(p)
satisfying_packages = []
for (_, packages) in non_same_versions_packages.items():
if len(packages) == 1:
satisfying_packages.extend(packages)
else:
packages = sorted(packages)
satisfying_packages.append(packages[-1])
if len(satisfying_packages) > 1:
msg = "Multiple satisfying packages found for requirement %s: %s" % (pip_requirement,
", ".join([str(p) for p in satisfying_packages]))
raise MultiplePackageSolutions(msg)
else:
return satisfying_packages[0]
def _match_pip_name(self, pip_requirement):
# See if we can find anything that might work
# by looking at our available yum packages.
all_available = self.helper.get_available()
# Try a few name variations to see if we can find a matching
# rpm for a given pip, using a little apriori knowledge about
# how redhat usually does it...
def is_exact_match(yum_pkg):
possible_names = [
"python-%s" % (pip_requirement.project_name),
"python-%s" % (pip_requirement.key),
]
pkg_name = str(yum_pkg.name)
if skip_packages_named(pkg_name):
return False
if pkg_name in possible_names:
return True
return False
def is_weak_exact_match_name(yum_pkg):
possible_names = [
pip_requirement.project_name,
pip_requirement.key,
"python-%s" % (pip_requirement.project_name),
"python-%s" % (pip_requirement.key),
]
pkg_name = str(yum_pkg.name)
if skip_packages_named(pkg_name):
return False
if pkg_name in possible_names:
return True
return False
def skip_packages_named(name):
# Skip on ones that end with '-doc' or 'src'
name = name.lower()
if name.endswith('doc'):
return True
if name.endswith('-src'):
return True
return False
def is_partial_match_name(yum_pkg):
possible_names = [
pip_requirement.project_name,
pip_requirement.key,
"python-%s" % (pip_requirement.project_name),
"python-%s" % (pip_requirement.key),
]
pkg_name = str(yum_pkg.name)
if skip_packages_named(pkg_name):
return False
for n in possible_names:
if pkg_name.find(n) != -1:
return True
return False
for func in [is_exact_match, is_weak_exact_match_name, is_partial_match_name]:
matches = [p for p in all_available if func(p)]
if len(matches):
return matches
return []
def _execute_yum(self, cmd, **kargs):
yum_cmd = YUM_CMD + cmd
return sh.execute(*yum_cmd, run_as_root=True,
@ -183,17 +68,6 @@ class YumPackager(pack.Packager):
def _remove_special(self, name, info):
return False
def _install_special(self, name, info):
return False
def _install(self, pkg):
req = extract_requirement(pkg)
if self._install_special(req.name, pkg):
return
else:
cmd = YUM_INSTALL + [str(req)]
self._execute_yum(cmd)
def _remove(self, pkg):
req = extract_requirement(pkg)
whats_there = self.helper.get_installed(req.name)
@ -219,3 +93,208 @@ class YumPackager(pack.Packager):
# it does cause problems...
cmd = YUM_REMOVE + [req.name]
self._execute_yum(cmd)
def pre_install(self, pkg, params=None):
"""pre-install is handled in openstack-deps %pre script.
"""
pass
def post_install(self, pkg, params=None):
"""post-install is handled in openstack-deps %post script.
"""
pass
class YumDependencyHandler(base.DependencyHandler):
OPENSTACK_DEPS_PACKAGE_NAME = "openstack-deps"
OPENSTACK_EPOCH = 2
py2rpm_executable = sh.which("py2rpm", ["multipip"])
def __init__(self, distro, root_dir, instances):
super(YumDependencyHandler, self).__init__(distro, root_dir, instances)
self.rpmbuild_dir = sh.joinpths(self.deps_dir, "rpmbuild")
self.deps_repo_dir = sh.joinpths(self.deps_dir, "openstack-deps")
self.deps_src_repo_dir = sh.joinpths(self.deps_dir, "openstack-deps-sources")
self.anvil_repo_filename = sh.joinpths(self.deps_dir, "anvil.repo")
def _epoch_list(self):
return [
"--epoch-list",
] + ["%s==%s" % (name, self.OPENSTACK_EPOCH) for name in self.python_names]
def package(self):
super(YumDependencyHandler, self).package()
self._write_all_deps_package()
self._build_dependencies()
self._build_openstack()
self._create_deps_repo()
def _write_all_deps_package(self):
spec_filename = sh.joinpths(
self.rpmbuild_dir,
"SPECS",
"%s.spec" % self.OPENSTACK_DEPS_PACKAGE_NAME)
for dirname in (self.rpmbuild_dir,
self.deps_repo_dir,
self.deps_src_repo_dir):
sh.deldir(dirname)
sh.mkdir(dirname, recurse=True)
today = datetime.date.today()
spec_content = """Name: %s
Version: %s.%s.%s
Release: 0
License: Apache 2.0
Summary: Python dependencies for OpenStack
BuildArch: noarch
""" % (self.OPENSTACK_DEPS_PACKAGE_NAME, today.year, today.month, today.day)
packages = {}
for inst in self.instances:
try:
for pack in inst.packages:
packages[pack["name"]] = pack
except AttributeError:
pass
scripts = {}
script_map = {
"pre-install": "%pre",
"post-install": "%post",
"pre-uninstall": "%preun",
"post-uninstall": "%postun",
}
for pack_name in sorted(packages.iterkeys()):
pack = packages[pack_name]
spec_content += "Requires: %s\n" % pack["name"]
for script_name in script_map.iterkeys():
try:
script_list = pack[script_name]
except (KeyError, ValueError):
continue
script_body = scripts.get(script_name, "")
script_body = "%s\n# %s\n" % (script_body, pack_name)
for script in script_list:
try:
line = " ".join(
sh.shellquote(word)
for word in script["cmd"])
except (KeyError, ValueError):
continue
if script.get("ignore_failure"):
ignore = " 2>/dev/null || true"
else:
ignore = ""
script_body = "".join((
script_body,
line,
ignore,
"\n"))
scripts[script_name] = script_body
spec_content += "\n%description\n\n"
for script_name in sorted(script_map.iterkeys()):
try:
script_body = scripts[script_name]
except KeyError:
pass
else:
spec_content = "%s\n%s\n%s\n" % (
spec_content,
script_map[script_name],
script_body)
spec_content += "\n%files\n"
sh.write_file(spec_filename, spec_content)
cmdline = [
"rpmbuild", "-ba",
"--define", "_topdir %s" % self.rpmbuild_dir,
spec_filename,
]
LOG.info("Building %s RPM" % self.OPENSTACK_DEPS_PACKAGE_NAME)
sh.execute(*cmdline)
def _build_dependencies(self):
package_files = self.download_dependencies()
if not package_files:
LOG.info("No RPM packages of OpenStack dependencies to build")
return
utils.log_iterable(sorted(package_files), logger=LOG,
header="Building RPM packages from files")
cmdline = [
self.py2rpm_executable,
"--rpm-base",
self.rpmbuild_dir,
] + self._epoch_list() + ["--"] + package_files
out_filename = sh.joinpths(self.deps_dir, "py2rpm.deps.out")
LOG.info("You can watch progress in another terminal with")
LOG.info(" tail -f %s" % out_filename)
with open(out_filename, "w") as out:
try:
sh.execute(*cmdline, stdout_fh=out, stderr_fh=out)
except excp.ProcessExecutionError:
LOG.error("Some packages failed to build.")
LOG.error("That's usually not a big deal,"
" so, you can ignore this fact")
def _build_openstack(self):
utils.log_iterable(sorted(self.package_dirs), logger=LOG,
header="Building RPM packages for directories")
cmdline = [
self.py2rpm_executable,
"--rpm-base",
self.rpmbuild_dir,
] + self._epoch_list() + ["--"] + self.package_dirs
out_filename = sh.joinpths(self.deps_dir, "py2rpm.openstack.out")
LOG.info("You can watch progress in another terminal with")
LOG.info(" tail -f %s" % out_filename)
with open(out_filename, "w") as out:
sh.execute(*cmdline, stdout_fh=out, stderr_fh=out)
def _create_deps_repo(self):
for filename in sh.listdir(sh.joinpths(self.rpmbuild_dir, "RPMS"),
recursive=True, files_only=True):
sh.move(filename, self.deps_repo_dir, force=True)
for filename in sh.listdir(sh.joinpths(self.rpmbuild_dir, "SRPMS"),
recursive=True, files_only=True):
sh.move(filename, self.deps_src_repo_dir, force=True)
for repo_dir in self.deps_repo_dir, self.deps_src_repo_dir:
cmdline = ["createrepo", repo_dir]
LOG.info("Creating repo at %s" % repo_dir)
sh.execute(*cmdline)
LOG.info("Writing anvil.repo to %s" % self.anvil_repo_filename)
(_fn, content) = utils.load_template('packaging', 'anvil.repo')
params = {"baseurl_bin": "file://%s" % self.deps_repo_dir,
"baseurl_src": "file://%s" % self.deps_src_repo_dir}
sh.write_file(
self.anvil_repo_filename, utils.expand_template(content, params))
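# For reference, an illustrative rendering of anvil.repo (actual paths
# depend on where the deps dir lives on the local machine):
#   [anvil]
#   name=anvil
#   baseurl=file:///<deps-dir>/openstack-deps
#   gpgcheck=0
#   [anvil-src]
#   name=anvil
#   baseurl=file:///<deps-dir>/openstack-deps-sources
#   gpgcheck=0
#   enabled=0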
def install(self):
super(YumDependencyHandler, self).install()
with sh.Rooted(True):
sh.copy(self.anvil_repo_filename, "/etc/yum.repos.d/")
cmdline = ["yum", "erase", "-y", self.OPENSTACK_DEPS_PACKAGE_NAME]
cmdline.extend(self.nopackages)
sh.execute(*cmdline, run_as_root=True, ignore_exit_code=True,
stdout_fh=sys.stdout, stderr_fh=sys.stderr)
cmdline = ["yum", "clean", "all"]
sh.execute(*cmdline, run_as_root=True)
cmdline = ["yum", "install", "-y", self.OPENSTACK_DEPS_PACKAGE_NAME]
sh.execute(*cmdline, run_as_root=True,
stdout_fh=sys.stdout, stderr_fh=sys.stderr)
cmdline = [self.py2rpm_executable, "--convert"] + self.python_names
rpm_names = []
# run as root since /tmp/pip-build-root must be owned by root
for name in sh.execute(*cmdline, run_as_root=True)[0].splitlines():
# name is "Requires: rpm-name"
try:
rpm_names.append(name.split(":")[1].strip())
except IndexError:
pass
cmdline = ["yum", "install", "-y"] + rpm_names
sh.execute(*cmdline, run_as_root=True,
stdout_fh=sys.stdout, stderr_fh=sys.stderr)

View File

@ -14,6 +14,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import distutils.spawn
import getpass
import grp
import os
@ -28,20 +29,13 @@ import time
import psutil # http://code.google.com/p/psutil/wiki/Documentation
import anvil
from anvil import env
from anvil import exceptions as excp
from anvil import log as logging
LOG = logging.getLogger(__name__)
SHELL_QUOTE_REPLACERS = {
"\"": "\\\"",
"(": "\\(",
")": "\\)",
"$": "\\$",
"`": "\\`",
}
# Locally stash these so that they can not be changed
# by others after this is first fetched...
SUDO_UID = env.get_key('SUDO_UID')
@ -252,21 +246,9 @@ def pipe_in_out(in_fh, out_fh, chunk_size=1024, chunk_cb=None):
def shellquote(text):
# TODO(harlowja) find a better way - since there doesn't seem to be a standard lib that actually works
do_adjust = False
for srch in SHELL_QUOTE_REPLACERS.keys():
if text.find(srch) != -1:
do_adjust = True
break
if do_adjust:
for (srch, replace) in SHELL_QUOTE_REPLACERS.items():
text = text.replace(srch, replace)
if do_adjust or \
text.startswith((" ", "\t")) or \
text.endswith((" ", "\t")) or \
text.find("'") != -1:
text = "\"%s\"" % (text)
if text.isalnum():
return text
return "'%s'" % text.replace("'", "'\\''")
def fileperms(path):
@ -677,9 +659,14 @@ def copytree(src, dst):
return dst
def move(src, dst):
def move(src, dst, force=False):
LOG.debug("Moving: %r => %r" % (src, dst))
if not is_dry_run():
if force:
if isdir(dst):
dst = joinpths(dst, basename(src))
if isfile(dst):
unlink(dst)
shutil.move(src, dst)
return dst
@ -770,3 +757,17 @@ def sleep(winks):
LOG.debug("Not really sleeping for: %s seconds" % (winks))
else:
time.sleep(winks)
def which(name, additional_dirs=None):
full_name = distutils.spawn.find_executable(name)
if full_name:
return full_name
for dir_name in additional_dirs or []:
full_name = joinpths(
dirname(dirname(abspth(anvil.__file__))),
dir_name,
name)
if isfile(full_name):
return full_name
raise excp.FileException("Cannot find %s" % name)
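# Example usage (see the yum dependency handler): which("py2rpm", ["multipip"])
# first checks the PATH and then falls back to <anvil-source-root>/multipip/py2rpm,
# raising FileException if neither location has the executable.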

View File

@ -78,10 +78,6 @@ class TraceWriter(object):
what['from'] = uri
self.trace(DOWNLOADED, json.dumps(what))
def pip_installed(self, pip_info):
self._start()
self.trace(PIP_INSTALL, json.dumps(pip_info))
def dirs_made(self, *dirs):
self._start()
for d in dirs:
@ -91,10 +87,6 @@ class TraceWriter(object):
self._start()
self.trace(FILE_TOUCHED, fn)
def package_installed(self, pkg_info):
self._start()
self.trace(PKG_INSTALL, json.dumps(pkg_info))
def app_started(self, name, info_fn, how):
self._start()
data = dict()

View File

@ -555,6 +555,14 @@ def welcome(prog_name='Anvil', version_text=version.version_string()):
print(colorizer.color(slang, 'magenta', bold=True))
return ("-", real_max)
def splitlines_not_empty(text):
for line in text.splitlines():
line = line.strip()
if line:
yield line
def canon_mq_type(mq_type):
mq_type = str(mq_type).lower().strip()
return MQ_TYPES.get(mq_type, 'rabbit')

14
clean-pip Executable file
View File

@ -0,0 +1,14 @@
#!/bin/bash
# This utility moves aside files that were installed by pip
# but are not owned by any rpm.
tmp_dir=$(mktemp -d)
echo "Moving unowned files to $tmp_dir"
for f in /usr/lib*/python*/site-packages/*; do
if ! rpm -qf $f &>/dev/null; then
mv -v $f $tmp_dir/
fi
done

View File

@ -0,0 +1,7 @@
# Settings for component django-openstack-auth
---
# Where we download this from...
get_from: git://github.com/gabrielhurley/django_openstack_auth.git?tag=1.0.10
...

View File

@ -3,6 +3,7 @@
name: rhel
platform_pattern: redhat(.*)|centos(.*)
packager_name: anvil.packaging.yum:YumPackager
dependency_handler: anvil.packaging.yum:YumDependencyHandler
commands:
apache:
name: httpd
@ -10,12 +11,6 @@ commands:
start: service httpd start
status: service httpd status
stop: service httpd stop
git:
branch: git branch
checkout: git checkout
clean: git clean
clone: git clone
reset: git reset
libvirt:
restart: service libvirtd restart
status: service libvirtd status
@ -37,10 +32,6 @@ commands:
stop: service mysqld stop
# Pip command varies depending on the distro
pip: pip-python
# Commands used when setting up python projects
python:
setup: python setup.py develop
unsetup: python setup.py develop --uninstall
# Where component symlinks will go, the component name will become a directory
# under this directory where its configuration files will be connected to their
# actual location.
@ -55,17 +46,15 @@ components:
cinder:
action_classes:
install: anvil.components.cinder:CinderInstaller
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.cinder:CinderRuntime
test: anvil.components.base_testing:PythonTestingComponent
coverage: anvil.components.base_testing:PythonTestingComponent
uninstall: anvil.components.cinder:CinderUninstaller
uninstall: anvil.components.base_install:PythonUninstallComponent
pips:
- name: hp3parclient
cinder-client:
action_classes:
install: anvil.components.base_install:PythonInstallComponent
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.base_runtime:EmptyRuntime
test: anvil.components.base_testing:PythonTestingComponent
coverage: anvil.components.base_testing:PythonTestingComponent
@ -73,7 +62,7 @@ components:
db:
action_classes:
install: anvil.distros.rhel:DBInstaller
package: anvil.packaging.rpm:DependencyPackager
package: anvil.packaging.base:EmptyPackager
running: anvil.components.db:DBRuntime
coverage: anvil.components.base_testing:EmptyTestingComponent
test: anvil.components.base_testing:EmptyTestingComponent
@ -84,7 +73,7 @@ components:
general:
action_classes:
install: anvil.components.pkglist:Installer
package: anvil.packaging.rpm:DependencyPackager
package: anvil.packaging.base:EmptyPackager
running: anvil.components.base_runtime:EmptyRuntime
test: anvil.components.base_testing:EmptyTestingComponent
coverage: anvil.components.base_testing:EmptyTestingComponent
@ -133,193 +122,71 @@ components:
removable: false
- name: python-setuptools
removable: false
pip_to_package:
# Shared pypi packages + their package information.
# Packages is what is really used for installation (the pypi name
# is just for matching since the general section is not python code).
#
# When parsing 'tools/pip-requires' and
# 'tools/test-requires' (if they exist)
# the following map will be used to translate names
# and versions inside those files into distribution
# package names equivalents (if possible)
- name: nose
package:
name: python-nose1.1
packager_name: anvil.distros.rhel:YumPackagerWithRelinks
packager_options:
links:
- source: "/usr/lib/python2.6/site-packages/nose-1*-py2.6.egg/nose"
target: "/usr/lib/python2.6/site-packages/nose"
- source: "/usr/bin/nosetests1.1"
target: "/usr/bin/nosetests"
- name: pastedeploy
package:
name: python-paste-deploy1.5
packager_name: anvil.distros.rhel:YumPackagerWithRelinks
packager_options:
links:
- source: "/usr/lib/python2.6/site-packages/PasteDeploy-1.5*-py2.6.egg/paste/deploy"
target: "/usr/lib/python2.6/site-packages/paste/deploy"
- name: routes
package:
name: python-routes1.12
packager_name: anvil.distros.rhel:YumPackagerWithRelinks
packager_options:
links:
- source: "/usr/lib/python2.6/site-packages/Routes-1.*-py2.6.egg/routes"
target: "/usr/lib/python2.6/site-packages/routes"
- name: sphinx
package:
name: python-sphinx10
packager_name: anvil.distros.rhel:YumPackagerWithRelinks
packager_options:
links:
- source: "/usr/bin/sphinx-1.0-build"
target: "/usr/bin/sphinx-build"
- source: "/usr/bin/sphinx-1.0-quickstart"
target: "/usr/bin/sphinx-quickstart"
- source: "/usr/bin/sphinx-1.0-autogen"
target: "/usr/bin/sphinx-autogen"
- name: webob
package:
name: python-webob1.0
# Need to relink it so that it will work without modifications
# Since new packages in rhel must never use the same names
# as previous ones (this overrides that)
packager_name: anvil.distros.rhel:YumPackagerWithRelinks
packager_options:
links:
- source: "/usr/lib/python2.6/site-packages/WebOb-*-py2.6.egg/webob/"
target: "/usr/lib/python2.6/site-packages/webob"
pips:
# Pips that aren't packages yet (or versions aren't right...)
# and need to be installed by pip instead...
# quantum test-requires Babel>=0.9.6
- name: babel
version: ">=0.9.6"
- name: cliff
- name: coverage
- name: distribute
removable: false
- name: docutils
version: "==0.9.1"
- name: fixtures
- name: keyring # Shared at least by openstack-client, keystone-client (+anvil itself)
removable: false
# quantum pip-requires kombu==1.0.4; RHEL has 1.1.3
- name: kombu
- name: lxml
version: "2.3.5"
options: # Force it to upgrade if it's there already
# but versions are mismatched
- "-U"
removable: false
# depends on python-babel
- name: jinja2
# quantum test-requires mock>=1.0b1; RHEL has 0.8.0
- name: mock
version: ">=1.0b1"
# nova requires netaddr>=0.7.6; RHEL has 0.7.5-4
- name: netaddr
version: ">=0.7.6"
- name: nose-exclude
- name: nosehtmloutput
- name: openstack.nose_plugin
- name: pep8 # The rhel version appears to not be new enough...
- name: pylint # The rhel version appears to not be new enough...
- name: prettytable
version: ">=0.6,<0.7"
- name: pysqlite
options: # Force it to upgrade if it's there already
# but versions are mismatched
- "-U"
- name: pycrypto
version: "2.6"
options: # Force it to upgrade if it's there already
# but versions are mismatched
- "-U"
removable: false
# depends on python-babel
- name: sphinx
- name: python-subunit
nopips:
# these items fail to build from Python packages,
# but their RPMs are available from base and epel repos
- name: requests
version: '0.14.2' # 1.0 seemed to introduce new backward incompatible changes, not cool!!
# Need this or nova pukes with 'Did not recognize type 'BIGINT' of column 'bw_in''
- name: sqlalchemy
version: "0.7.9"
options: # Force it to upgrade if it's there already
# but versions are mismatched
- "-U"
- name: sqlalchemy-migrate
- name: testrepository
- name: testtools # Seems like the version in rhel is too old...
- name: mysql-python
- name: pyOpenSSL
# these packages conflict with our deps and must be removed
nopackages:
- name: python-paste-deploy1.5
- name: python-nose1.1
- name: python-routes1.12
- name: python-sphinx10
- name: python-webob1.0
- name: Django14
glance:
action_classes:
install: anvil.components.glance:GlanceInstaller
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.glance:GlanceRuntime
coverage: anvil.components:PythonTestingComponent
test: anvil.components:PythonTestingComponent
uninstall: anvil.components.glance:GlanceUninstaller
test: anvil.components.glance:GlanceTester
uninstall: anvil.components.base_install:PythonUninstallComponent
packages:
- name: MySQL-python
pips:
- name: boto
- name: wsgiref
- name: xattr # Seems to be only in test-requires
# warlock requires jsonschema>=0.7,<2
# pip downloads jsonschema-2.0 and
# then ignores warlock's requirement
- name: jsonschema
version: ">=0.7,<2"
glance-client:
action_classes:
install: anvil.components.glance_client:GlanceClientInstaller
package: anvil.packaging.rpm:PythonPackager
install: anvil.components.base_install:PythonInstallComponent
running: anvil.components.base_runtime:EmptyRuntime
test: anvil.components.glance_client:GlanceClientTester
coverage: anvil.components.glance_client:GlanceClientTester
uninstall: anvil.components.base_install:PythonUninstallComponent
pips:
- name: nosexcover
- name: setuptools-git
- name: discover
- name: warlock
horizon:
action_classes:
install: anvil.distros.rhel:HorizonInstaller
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.horizon:HorizonRuntime
test: anvil.components.base_testing:PythonTestingComponent
coverage: anvil.components.base_testing:PythonTestingComponent
uninstall: anvil.components.horizon:HorizonUninstaller
pip_to_package:
- name: django
package:
name: Django14
packages:
- name: httpd
- name: mod_wsgi
- name: nodejs
pips:
- name: django-openstack-auth
- name: nosexcover
- name: selenium
django-openstack-auth:
action_classes:
install: anvil.components.base_install:PythonInstallComponent
running: anvil.components.base_runtime:EmptyRuntime
test: anvil.components.base_testing:EmptyTestingComponent
uninstall: anvil.components.base_install:PythonUninstallComponent
keystone:
action_classes:
install: anvil.components.keystone:KeystoneInstaller
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.keystone:KeystoneRuntime
test: anvil.components.keystone:KeystoneTester
coverage: anvil.components.keystone:KeystoneTester
uninstall: anvil.components.keystone:KeystoneUninstaller
uninstall: anvil.components.base_install:PythonUninstallComponent
packages:
- name: MySQL-python
pips:
- name: pam
version: '0.1.4'
- name: nosexcover
- name: webtest # This version in package form conflicts with webob1.0
keystone-client:
action_classes:
install: anvil.components.keystone_client:KeystoneClientInstaller
package: anvil.packaging.rpm:PythonPackager
install: anvil.components.base_install:PythonInstallComponent
running: anvil.components.base_runtime:EmptyRuntime
test: anvil.components.base_testing:PythonTestingComponent
coverage: anvil.components.base_testing:PythonTestingComponent
@ -327,7 +194,6 @@ components:
nova:
action_classes:
install: anvil.distros.rhel:NovaInstaller
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.nova:NovaRuntime
test: anvil.components.base_testing:PythonTestingComponent
coverage: anvil.components.base_testing:PythonTestingComponent
@ -352,24 +218,13 @@ components:
removable: false
- name: vconfig
removable: false
pip_to_package:
- name: MySQL-python
package:
name: MySQL-python
pips:
# Why is this still needed??
- name: Cheetah
# This seems to be a core dependency for a 'cas' tool
# so don't try to remove it since it will also remove
# said 'cas' tool, unfortunately the version of paramiko
# installed in rhel uses an old version of crypto which
# other components actually can't use. This sucks...
- name: paramiko
- name: stevedore
- name: discover
- name: psycopg2
- name: suds # The version in rhel doesn't work...
- name: Babel # The version in rhel doesn't work...
subsystems:
compute:
packages:
@ -410,25 +265,24 @@ components:
nova-client:
action_classes:
install: anvil.components.base_install:PythonInstallComponent
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.base_runtime:EmptyRuntime
test: anvil.components.base_testing:PythonTestingComponent
coverage: anvil.components.base_testing:PythonTestingComponent
uninstall: anvil.components.base_install:PythonUninstallComponent
no-vnc:
action_classes:
install: anvil.components.novnc:NoVNCInstaller
package: anvil.components.base_install:EmptyPackagingComponent
install: anvil.components.base_install:PythonInstallComponent
package: anvil.packaging.base:EmptyPackager
running: anvil.components.novnc:NoVNCRuntime
test: anvil.components.base_testing:EmptyTestingComponent
coverage: anvil.components.base_testing:EmptyTestingComponent
uninstall: anvil.components.novnc:NoVNCUninstaller
uninstall: anvil.components.base_install:PythonUninstallComponent
packages:
- name: python-websockify
- name: numpy
openstack-client:
action_classes:
install: anvil.components.openstack_client:OpenStackClientInstaller
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.base_runtime:EmptyRuntime
test: anvil.components.openstack_client:OpenStackClientTester
coverage: anvil.components.openstack_client:OpenStackClientTester
@ -436,7 +290,6 @@ components:
oslo-config:
action_classes:
install: anvil.components.base_install:PythonInstallComponent
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.base_runtime:EmptyRuntime
test: anvil.components.base_testing:PythonTestingComponent
coverage: anvil.components.base_testing:PythonTestingComponent
@ -444,7 +297,6 @@ components:
oslo-incubator:
action_classes:
install: anvil.components.base_install:PythonInstallComponent
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.base_runtime:EmptyRuntime
test: anvil.components.base_testing:PythonTestingComponent
coverage: anvil.components.base_testing:PythonTestingComponent
@ -452,25 +304,21 @@ components:
quantum:
action_classes:
install: anvil.components.quantum:QuantumInstaller
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.quantum:QuantumRuntime
test: anvil.components.base_testing:PythonTestingComponent
coverage: anvil.components.base_testing:PythonTestingComponent
uninstall: anvil.components.quantum:QuantumUninstaller
uninstall: anvil.components.base_install:PythonUninstallComponent
quantum-client:
action_classes:
install: anvil.components.base_install:PythonInstallComponent
package: anvil.packaging.rpm:PythonPackager
running: anvil.components.base_runtime:EmptyRuntime
test: anvil.components.base_testing:PythonTestingComponent
coverage: anvil.components.base_testing:PythonTestingComponent
uninstall: anvil.components.base_install:PythonUninstallComponent
pips:
- name: cliff-tablib
rabbit-mq:
action_classes:
install: anvil.components.rabbit:RabbitInstaller
package: anvil.packaging.rpm:DependencyPackager
package: anvil.packaging.base:EmptyPackager
running: anvil.distros.rhel:RabbitRuntime
test: anvil.components.base_testing:EmptyTestingComponent
coverage: anvil.components.base_testing:EmptyTestingComponent
@ -494,8 +342,7 @@ components:
run_as_root: true
swift-client:
action_classes:
install: anvil.components.swift_client:SwiftClientInstaller
package: anvil.packaging.rpm:PythonPackager
install: anvil.components.base_install:PythonInstallComponent
running: anvil.components.base_runtime:EmptyRuntime
test: anvil.components.base_testing:PythonTestingComponent
coverage: anvil.components.base_testing:PythonTestingComponent

View File

@ -19,6 +19,7 @@ components:
- no-vnc
- nova
- nova-client
- django-openstack-auth
- horizon
options:
no-vnc:

View File

@ -8,6 +8,7 @@ components:
- quantum-client
- swift-client
- cinder-client
- django-openstack-auth
- horizon
options:
horizon:

View File

@ -0,0 +1,10 @@
[anvil]
name=anvil
baseurl=$baseurl_bin
gpgcheck=0
[anvil-src]
name=anvil
baseurl=$baseurl_src
gpgcheck=0
enabled=0

View File

@ -1,143 +0,0 @@
#*
This is a cheetah template for building a basic rpm spec file that can then
later be used with the rpmbuild command.
See: http://www.rpm.org/max-rpm/
http://fedoraproject.org/wiki/How_to_create_an_RPM_package
http://fedoraproject.org/wiki/Packaging:Guidelines
... (many others)
*#
#for $d in $defines
%define ${d}
#end for
#for $d in $undefines
%undefine ${d}
#end for
#
# Spec file for $details.name auto-generated on ${date} by ${who}
#
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
Name: $details.name
Summary: $details.summary
Version: $details.version
Release: $details.release%{?dist}
Packager: $details.packager
#if $details.url
URL: $details.url
#end if
#if $details.vendor
Vendor: $details.vendor
#end if
#set $s_len = len($files.sources)
#for $i in $range(0, $s_len)
Source${i}: $files.sources[$i]
#end for
BuildRoot: %{_tmppath}/%{name}-%{version}-build
License: $details.license
#if $build.has_key('arch')
BuildArch: $build.arch
#end if
#if $details.automatic_dependencies
AutoReqProv: yes
#else
AutoReqProv: no
#end if
#if $conflicts
# Packages we conflict with
#for $i in $conflicts
Conflicts: ${i}
#end for
#end if
#if $obsoletes
# Packages we obsolete
#for $i in $obsoletes
Obsoletes: ${i}
#end for
#end if
#if $requires.build
# Build requirements
#for $i in $requires.build
BuildRequires: ${i}
#end for
#end if
#if $requires.install
# Install requirements
#for $i in $requires.install
Requires: ${i}
#end for
#end if
# Custom patches
#set $size = 0
#for $p in $patches
Patch${size}: $p
#set $size += 1
#end for
%description
#if $details.description
$details.description
#else
$details.summary
#end if
%prep
#if $build.has_key('setup')
%setup $build.setup
#end if
# Custom patches activation
#set $size = 0
#for $p in $patches
%patch${size} -p1
#set $size += 1
#end for
#if $build.has_key('action')
%build
$build.action
#end if
%install
#if $build.has_key('install_how')
$build.install_how
#end if
#if $build.has_key('remove_file')
$build.remove_file
#end if
%files
%defattr(-,root,root,-)
#for $f in $files.files
${f}
#end for
#for $d in $files.docs
%doc ${d}
#end for
#for $d in $files.directories
%dir ${d}
#end for
# extra files
%changelog
$details.changelog

84
multipip/README.rst Normal file
View File

@ -0,0 +1,84 @@
multipip
========
The `pip` utility refuses to handle multiple requirements for one package::
$ pip install 'nose>=1.2' 'nose>=2' 'nose<4'
Double requirement given: nose>=2 (already in nose>=1.2, name='nose')
Use `multipip` to join these requirements::
$ multipip 'nose>=1.2' 'nose>=2' 'nose<4'
nose>=2,<4
Files of requirements can be used as well::
$ cat pip-requires
nose<4
$ multipip 'nose>=1.2' 'nose>=2' -r pip-requires
nose>=2,<4
`multipip` prints error messages for incompatible requirements to
stderr and chooses the first one::
$ cat pip-requires
pip==1.3
$ multipip 'pip==1.2' -r pip-requires
pip: incompatible requirements
Choosing:
command line: pip==1.2
Conflicting:
-r pip-requires (line 1): pip==1.3
pip==1.2
It is possible to filter some packages from printed output. This can
be useful for a huge `pip-requires` file::
$ cat pip-requires
nose<4
pip==1.2
nose>=1.2
$ multipip -r pip-requires --ignore-packages nose
pip==1.2
Installed packages can be filtered, too (they are taken from `pip
freeze`)::
$ cat pip-requires
nose<4
pip==1.2
nose>=1.2
$ pip freeze | grep nose
nose==1.1.2
$ multipip -r pip-requires --ignore-installed
pip==1.2
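
Internally this boils down to merging the version specifiers that share a
project name; a rough, minimal sketch of the idea using ``pkg_resources``
(illustrative only -- the real tool additionally drops redundant bounds and
reports conflicts as shown above)::

    import pkg_resources

    def naive_join(*req_strings):
        # Collapse several requirements for one project into a single one,
        # e.g. 'nose>=1.2', 'nose>=2', 'nose<4' -> nose>=1.2,>=2,<4
        reqs = [pkg_resources.Requirement.parse(r) for r in req_strings]
        specs = ",".join("%s%s" % (op, ver)
                         for r in reqs for (op, ver) in r.specs)
        return pkg_resources.Requirement.parse(reqs[0].project_name + specs)

    print naive_join('nose>=1.2', 'nose>=2', 'nose<4')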
py2rpm
======
Distutils provides an interface for building RPMs::
$ python ./setup.py bdist_rpm
This tool has several problems:
* Red Hat based distros use different package names, e.g.,
`python-setuptools` instead of `distribute`, `python-nose` instead
of `nose` and so on;
* `Requires` and `Conflicts` sections for generated RPM are incorrect;
* sometimes not all required files are packaged;
* miscellaneous problems with man files;
* package directory in `/usr/lib*/python*/site-packages/<pkg>` is not
owned by any RPM;
* some packages (like selenium) are architecture dependent but
`bdist_rpm` generates `BuildArch: noarch` for them.
`py2rpm` aims to solve all these problems.
`py2rpm` accepts a list of archive names or package directories and
builds RPMs (current directory is used by default)::
$ py2rpm
...
Wrote: /home/guest/rpmbuild/SRPMS/python-multipip-0.1-1.src.rpm
Wrote: /home/guest/rpmbuild/RPMS/noarch/python-multipip-0.1-1.noarch.rpm
...
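
``py2rpm`` also has a ``--convert`` mode that only maps Python requirement
names to distribution package names; anvil uses it during install to locate
RPMs for the client libraries it has built. A hypothetical session (the
``Requires: <rpm-name>`` output format is what YumDependencyHandler parses)::

    $ py2rpm --convert nose pam
    Requires: python-nose
    Requires: python-pam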

333
multipip/multipip Executable file
View File

@ -0,0 +1,333 @@
#!/usr/bin/python
import argparse
import distutils.spawn
import logging
import os
import subprocess
import sys
import pip.index
import pip.req
from pip.vcs import git, mercurial, subversion, bazaar
import pkg_resources
BAD_REQUIREMENTS = 2
INCOMPATIBLE_REQUIREMENTS = 3
logger = logging.getLogger()
def create_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
"-r", "--requirement",
dest="requirements",
nargs="*",
default=[],
metavar="<file>",
help="Install all the packages listed in the given requirements file")
parser.add_argument(
"requirement_specs",
nargs="*",
default=[],
metavar="<requirement specifier>",
help="Install specified package")
parser.add_argument(
# A regex to be used to skip requirements
"--skip-requirements-regex",
default="",
help=argparse.SUPPRESS)
parser.add_argument(
# The default version control system for editables, e.g. 'svn'
'--default-vcs',
dest='default_vcs',
default='',
help=argparse.SUPPRESS)
parser.add_argument(
"--debug", "-d",
action="store_true",
default=False,
help="Print debug information")
parser.add_argument(
"--ignore-installed", "-i",
action="store_true",
default=False,
help="Ignore installed packages")
parser.add_argument(
"--ignore-packages",
nargs="*",
default=[],
metavar="<requirement specifier>",
help="Ignore listed packages")
parser.add_argument(
"--frozen", "-f",
action="store_true",
default=False,
help="Make requirements meet installed packages (taken from pip freeze)")
pip_executable = (distutils.spawn.find_executable("pip") or
distutils.spawn.find_executable("pip-python"))
parser.add_argument(
"--pip",
metavar="<filename>",
default=pip_executable,
help="Full or short name of pip executable (default: %s)" %
pip_executable)
return parser
def setup_logging(options):
level = logging.DEBUG if options.debug else logging.WARNING
handler = logging.StreamHandler(sys.stderr)
logger.addHandler(handler)
logger.setLevel(level)
incompatibles = set()
joined_requirements = []
def install_requirement_ensure_req_field(req):
if not req.req:
# pip 0.8 or so
link = pip.index.Link(req.url)
name = link.egg_fragment
if not name:
raise Exception("Cannot find package name from `%s'" % req.url)
req.req = pkg_resources.Requirement.parse(name)
return req
def install_requirement_str(req):
return req.url or str(req.req)
def install_requirement_parse(line, comes_from):
line = line.strip()
if line.startswith('-e') or line.startswith('--editable'):
if line.startswith('-e'):
line = line[2:].strip()
else:
line = line[len('--editable'):].strip().lstrip('=')
req = pip.req.InstallRequirement.from_editable(
line, comes_from=comes_from)
else:
req = pip.req.InstallRequirement.from_line(line, comes_from)
return install_requirement_ensure_req_field(req)
def incompatible_requirement(chosen, conflicting):
if chosen.req.key not in incompatibles:
incompatibles.add(chosen.req.key)
print >> sys.stderr, "%s: incompatible requirements" % chosen.req.key
print >> sys.stderr, "Choosing:"
print >> sys.stderr, ("\t%s: %s" %
(chosen.comes_from,
install_requirement_str(chosen)))
print >> sys.stderr, "Conflicting:"
print >> sys.stderr, ("\t%s: %s" %
(conflicting.comes_from,
install_requirement_str(conflicting)))
def parse_requirements(options):
"""Parse package requirements from command line and files.
:return: tuple (all, ignored) of InstallRequirement
"""
all_requirements = {}
for req_spec in options.requirement_specs:
try:
req = install_requirement_parse(req_spec, "command line")
all_requirements.setdefault(req.req.key, []).append(req)
except Exception as ex:
logger.error("Cannot parse `%s': %s" % (req_spec, ex))
sys.exit(BAD_REQUIREMENTS)
for filename in options.requirements:
try:
for req in pip.req.parse_requirements(filename, options=options):
req = install_requirement_ensure_req_field(req)
all_requirements.setdefault(req.req.key, []).append(req)
except Exception as ex:
logger.error("Cannot parse `%s': %s" % (filename, ex))
sys.exit(BAD_REQUIREMENTS)
ignored_requirements = []
for req_spec in options.ignore_packages:
try:
req = install_requirement_parse(req_spec, "command line")
ignored_requirements.append(req)
except Exception as ex:
logger.error("Cannot parse `%s': %s" % (req_spec, ex))
sys.exit(BAD_REQUIREMENTS)
return all_requirements, ignored_requirements
def installed_packages(options):
pip_cmdline = [
options.pip,
"freeze",
]
(package_list, _) = subprocess.Popen(
pip_cmdline, stdout=subprocess.PIPE).communicate()
pkg_list = []
for line in package_list.splitlines():
try:
pkg_list.append(install_requirement_parse(line, "pip freeze").req)
except Exception:
pass
return pkg_list
def join_one_requirement(req_list):
"""Join requirement list for one package together.
Possible returns:
* ==A - exact version (even when there are conflicts)
* >=?A,<=?B,(!=C)+ - line segment (no conflicts detected)
* >=?A,(!=C)+ - more than (also when conflicts detected)
:param req_list: list of pip.req.InstallRequirement
:return: pip.req.InstallRequirement
"""
if len(req_list) == 1:
return req_list[0]
req_strict = None
lower_bound_str = None
lower_bound_version = None
lower_bound_req = None
upper_bound_str = None
upper_bound_version = None
upper_bound_req = None
conflicts = []
for req in req_list:
for spec in req.req.specs:
if spec[0] == "==":
return req
spec_str = "%s%s" % spec
if spec[0] == "!=":
conflicts.append(spec_str)
continue
version = pkg_resources.parse_version(spec[1])
# strict_check is < or >, not <= or >=
strict_check = len(spec[0]) == 1
if spec[0][0] == ">":
if (not lower_bound_version or (version > lower_bound_version) or
(strict_check and version == lower_bound_version)):
lower_bound_version = version
lower_bound_str = spec_str
lower_bound_req = req
else:
if (not upper_bound_version or (version < upper_bound_version) or
(strict_check and version == upper_bound_version)):
upper_bound_version = version
upper_bound_str = spec_str
upper_bound_req = req
# The key is needed by the exact-version short-circuit below, so bind it
# before examining the bounds.
req_key = req_list[0].req.key
if lower_bound_version and upper_bound_version:
if lower_bound_version > upper_bound_version:
upper_bound_str = None
if lower_bound_version == upper_bound_version:
if lower_bound_str[1] == "=" and upper_bound_str[1] == "=":
return pip.req.InstallRequirement.from_line(
"%s==%s" % (req_key, upper_bound_str[2:]),
"compiled")
else:
upper_bound_str = None
req_specs = []
if lower_bound_str:
req_specs.append(lower_bound_str)
if upper_bound_str:
req_specs.append(upper_bound_str)
req_specs.extend(conflicts)
return pip.req.InstallRequirement.from_line(
"%s%s" % (req_key, ",".join(req_specs)),
"compiled")
def join_requirements(options):
global joined_requirements
all_requirements, ignored_requirements = parse_requirements(options)
skip_keys = set(pkg.req.key for pkg in ignored_requirements)
installed_by_key = {}
installed_requirements = []
if options.ignore_installed or options.frozen:
installed_requirements = installed_packages(options)
if options.ignore_installed:
skip_keys |= set(pkg.key for pkg in installed_requirements)
if options.frozen:
installed_by_key = dict((pkg.key, pkg) for pkg in installed_requirements)
for req_key, req_list in all_requirements.iteritems():
if req_key in skip_keys:
continue
joined_req = join_one_requirement(req_list)
try:
installed_req = installed_by_key[req_key]
installed_version = installed_req.index[0][0]
except (KeyError, IndexError):
pass
else:
if installed_version not in joined_req.req:
frozen_req = pip.req.InstallRequirement.from_line(
"%s>=%s" % (installed_req.project_name,
installed_req.specs[0][1]),
"pip freeze")
incompatible_requirement(frozen_req, joined_req)
joined_req = frozen_req
joined_requirements.append(joined_req.req)
segment_ok = False
lower_version = None
lower_strict = False
exact_version = None
conflicts = []
for parsed, trans, op, ver in joined_req.req.index:
if op[0] == ">":
lower_version = parsed
lower_strict = len(op) == 2
elif op[0] == "<":
segment_ok = True
elif op[0] == "=":
exact_version = parsed
else:
conflicts.append(parsed)
if exact_version:
for req in req_list:
if not exact_version in req.req:
incompatible_requirement(joined_req, req)
else:
for req in req_list:
for parsed, trans, op, ver in req.req.index:
if op[0] == "=":
if parsed in conflicts:
incompatible_requirement(joined_req, req)
break
elif not segment_ok and op[0] == "<":
# analyse lower bound: x >= A or x > A
if (lower_version > parsed or (
lower_version == parsed and
(lower_strict or len(op) != 2))):
incompatible_requirement(joined_req, req)
break
def print_requirements():
for req in sorted(joined_requirements, key=lambda x: x.key):
print req
def main():
parser = create_parser()
options = parser.parse_args()
setup_logging(options)
join_requirements(options)
print_requirements()
if incompatibles:
sys.exit(INCOMPATIBLE_REQUIREMENTS)
if __name__ == "__main__":
main()

468
multipip/py2rpm Executable file
View File

@ -0,0 +1,468 @@
#!/usr/bin/python
import argparse
import distutils.spawn
import logging
import re
import os
import os.path
import shutil
import subprocess
import sys
import tempfile
import pip.util
import pkg_resources
class InstallationError(Exception):
pass
logger = logging.getLogger()
package_map = {
"django": "Django",
"distribute": "python-setuptools",
"pam": "python-pam",
"pycrypto": "python-crypto",
}
package_names = {}
arch_dependent = [
"selenium",
]
epoch_map = {}
def package_name_python2rpm(python_name):
python_name = python_name.lower()
try:
return package_map[python_name]
except KeyError:
pass
python_name = python_name.replace("_", "-").replace(".", "-")
if python_name.startswith("python-"):
prefixed_name = python_name
else:
prefixed_name = "python-%s" % python_name
try:
return package_names[prefixed_name]
except KeyError:
pass
try:
return package_names[python_name]
except KeyError:
pass
return prefixed_name
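# Illustration (assuming build_name_map() below has filled package_names from
# "yum list"): "django" maps to "Django" via package_map, "PyYAML" is matched
# case-insensitively against the yum package list, and an unknown name such as
# "oslo.config" falls back to the generated "python-oslo-config".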
setup_py = "setup.py"
def egg_info_path(source_dir, filename):
base = os.path.join(source_dir, "pip-egg-info")
filenames = os.listdir(base)
if not filenames:
raise InstallationError("No files/directories in %s (from %s)"
% (base, filename))
# if we have more than one match, we pick the toplevel one.
if len(filenames) > 1:
filenames.sort(key=lambda x: x.count(os.path.sep) +
(os.path.altsep and
x.count(os.path.altsep) or 0))
return os.path.join(base, filenames[0], filename)
def egg_info_lines(source_dir, filename):
filename = egg_info_path(source_dir, filename)
if not os.path.exists(filename):
return []
with open(filename, "r") as f:
return f.readlines()
_requirements_section_re = re.compile(r'\[(.*?)\]')
def egg_info_requirements(source_dir, extras=()):
in_extra = None
for line in egg_info_lines(source_dir, 'requires.txt'):
match = _requirements_section_re.match(line.lower())
if match:
in_extra = match.group(1)
continue
if in_extra and in_extra not in extras:
# Skip requirement for an extra we aren't requiring
continue
yield line
def setup_py_one_line(source_dir, command):
"""Run `python setup.py $command` and return the last line.
python-ldap is so clever that it prints extra stuff
before the package name or version. Let's return the last line.
"""
return call_subprocess(
[sys.executable, setup_py, command],
cwd=source_dir, show_stdout=False)[0].splitlines()[-1].strip()
def create_parser():
parser = argparse.ArgumentParser()
rpm_base = os.path.expanduser("~/rpmbuild")
source_dir = os.getcwd()
rpmbuild_executable = (distutils.spawn.find_executable("rpmbuild") or
distutils.spawn.find_executable("rpm"))
parser.add_argument(
"--pip-verbose", "-f",
action="store_true",
default=False,
help="Show pip stdout")
parser.add_argument(
"--debug", "-d",
action="store_true",
default=False,
help="Print debug information")
parser.add_argument(
"--source-only", "-s",
action="store_true",
default=False,
help="Only generate source RPM")
parser.add_argument(
"--rpm-base",
metavar="<dir>",
default=rpm_base,
help="rpmbuild directory (default: %s)" % rpm_base)
parser.add_argument(
"--rpmbuild",
metavar="<dir>",
default=rpmbuild_executable,
help="rpmbuild executable (default: %s)" % rpmbuild_executable)
parser.add_argument(
"--convert", "-c",
dest="convert",
metavar="<name>",
nargs="+",
default=[],
help="Python requirement name to be converted to RPM package names")
parser.add_argument(
dest="sources",
metavar="<dir or archive>",
nargs="*",
default=[source_dir],
help="Source directories of packages (default: current directory)")
parser.add_argument(
"--install-script",
metavar="<filename>",
default=None,
help="Specify a script for the INSTALL phase of RPM building")
parser.add_argument(
"--arch-dependent", "-a",
metavar="<Python package name>",
nargs="+",
default=arch_dependent,
help="Known architecture dependent packages")
parser.add_argument(
"--epoch", "-e",
metavar="<number>",
type=int,
default=None,
help="RPM epoch for generated packages")
parser.add_argument(
"--epoch-list", "-l",
metavar="<Python package name == epoch number>",
nargs="+",
default=[],
help="Forced RPM epochs for packages")
return parser
def call_subprocess(cmd, cwd=None, show_stdout=True, raise_on_returncode=True):
if show_stdout:
stdout = None
else:
stdout = subprocess.PIPE
proc = subprocess.Popen(cmd, cwd=cwd, stderr=None, stdin=None, stdout=stdout)
ret = proc.communicate()
if proc.returncode:
cwd = cwd or os.getcwd()
command_desc = " ".join(cmd)
if raise_on_returncode:
raise InstallationError(
"Command %s failed with error code %s in %s"
% (command_desc, proc.returncode, cwd))
else:
logger.warn(
"Command %s had error code %s in %s"
% (command_desc, proc.returncode, cwd))
return ret
def setup_logging(options):
level = logging.DEBUG if options.debug else logging.WARNING
handler = logging.StreamHandler(sys.stderr)
logger.addHandler(handler)
logger.setLevel(level)
def build_name_map():
cmdline = ["yum", "list", "-q"]
try:
yum_list = call_subprocess(cmdline, show_stdout=False)[0]
except Exception as ex:
logging.warning(str(ex))
return
for line in yum_list.split("\n")[1:]:
if line:
line = line.split(None, 1)[0].split(".", 1)[0]
package_names[line.lower()] = line
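# For example (illustrative line): "PyYAML.x86_64  3.10-3.el6  base" from
# "yum list" is recorded here as package_names["pyyaml"] = "PyYAML".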
def build_epoch_map(options):
for epoch_spec in options.epoch_list:
try:
(name, epoch) = epoch_spec.split("==")
name = name.strip().lower()
epoch = epoch.strip()
assert(name and epoch)
except (ValueError, AssertionError):
raise InstallationError("Bad epoch specifier: `%s'" % epoch_spec)
else:
epoch_map[name] = epoch
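# Illustration: passing --epoch-list "Cheetah == 1" stores
# epoch_map["cheetah"] = "1", so the generated RPM gets "Epoch: 1" and any
# Requires:/Conflicts: versions for that package are prefixed as "1:<version>".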
def run_egg_info(source_dir, options):
script = """
__file__ = __SETUP_PY__
from setuptools.command import egg_info
import pkg_resources
import os
def replacement_run(self):
self.mkpath(self.egg_info)
installer = self.distribution.fetch_build_egg
for ep in pkg_resources.iter_entry_points('egg_info.writers'):
# require=False is the change we're making:
writer = ep.load(require=False)
if writer:
writer(self, ep.name, os.path.join(self.egg_info,ep.name))
self.find_sources()
egg_info.egg_info.run = replacement_run
exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))
"""
script = script.replace('__SETUP_PY__', "'setup.py'")
egg_info_dir = os.path.join(source_dir, 'pip-egg-info')
if not os.path.exists(egg_info_dir):
os.makedirs(egg_info_dir)
egg_base_option = ['--egg-base', 'pip-egg-info']
call_subprocess(
[sys.executable, '-c', script, 'egg_info'] + egg_base_option,
cwd=source_dir,
show_stdout=options.pip_verbose)
VERSION_RE = re.compile(r"^(.*[^.0])(\.0+)*$")
def trim_zeroes(version):
"""RPM mishandles versions like "0.8.0". Make it happy."""
match = VERSION_RE.match(version)
if match:
return match.group(1)
return version
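# For example: trim_zeroes("0.8.0") -> "0.8", trim_zeroes("1.0.0") -> "1",
# while a version such as "1.0.1" is returned unchanged.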
def requires_and_conflicts(req_list, multiline):
rpm_requires = ""
rpm_conflicts = ""
for line in req_list:
try:
req = pkg_resources.Requirement.parse(line)
except Exception:
continue
rpm_name = package_name_python2rpm(req.key)
if not req.specs:
if multiline:
rpm_requires += "\nRequires:"
rpm_requires = "%s %s" % (
rpm_requires, rpm_name)
for spec in req.specs:
# kind in ("==", "<=", ">=", "!=")
kind = spec[0]
version = trim_zeroes(spec[1])
try:
version = "%s:%s" % (epoch_map[req.key], version)
except KeyError:
pass
if kind == "!=":
if multiline:
rpm_conflicts += "\nConflicts:"
rpm_conflicts = "%s %s = %s" % (
rpm_conflicts, rpm_name, version)
continue
if kind == "==":
kind = "="
if multiline:
rpm_requires += "\nRequires:"
rpm_requires = "%s %s %s %s" % (
rpm_requires, rpm_name, kind, version)
return rpm_requires, rpm_conflicts
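# Illustration (hypothetical requirement, no epoch registered): a line such as
# "oslo.config>=1.1.0,!=1.2.0" would yield
#   rpm_requires  == " python-oslo-config >= 1.1"
#   rpm_conflicts == " python-oslo-config = 1.2"
# assuming yum does not already advertise the package under another name.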
def build_rpm(options, filename):
if os.path.isfile(filename):
temp_dir = tempfile.mkdtemp('-unpack', 'py2rpm-')
pip.util.unpack_file(filename, temp_dir, None, None)
source_dir = temp_dir
archive_name = filename
elif os.path.isdir(filename):
temp_dir = None
archive_name = None
source_dir = filename
else:
raise InstallationError(
"`%s' is not a regular file nor a directory" % filename)
setup_py = "setup.py"
run_egg_info(source_dir, options)
rpm_requires, rpm_conflicts = requires_and_conflicts(
egg_info_requirements(source_dir), multiline=False)
pkg_name = setup_py_one_line(source_dir, "--name")
build_dir = options.rpm_base
cmdline = [
sys.executable, setup_py, "bdist_rpm",
"--rpm-base", build_dir,
"--source-only",
"--install-script", options.install_script,
]
if rpm_requires:
cmdline += ["--requires", rpm_requires]
if rpm_conflicts:
cmdline += ["--conflicts", rpm_conflicts]
call_subprocess(cmdline, cwd=source_dir, raise_on_returncode=False)
rpm_name = package_name_python2rpm(pkg_name)
spec_name = os.path.join(build_dir, "SPECS", "%s.spec" % pkg_name)
if not os.path.exists(spec_name):
raise InstallationError("`%s' does not exist" % spec_name)
if rpm_name != pkg_name:
old_name = spec_name
spec_name = os.path.join(build_dir, "SPECS", "%s.spec" % rpm_name)
os.rename(old_name, spec_name)
cmdline = [
"sed", "-i",
"-e", "s/^Name:.*$/Name: %s/" % rpm_name,
"-e", "s/%{name}/%{pkg_name}/g",
"-e", "s/^%%define name.*$/%%define pkg_name %s/" % pkg_name,
]
epoch = epoch_map.get(pkg_name.lower(), options.epoch)
if epoch is not None:
cmdline += [
"-e", "s/^Version:/Epoch: %s\\nVersion:/" % epoch,
]
if pkg_name.lower() in options.arch_dependent:
cmdline += [
"-e", "/^BuildArch/d",
]
if archive_name:
cmdline += [
"-e",
"s/^Source0: .*$/Source0: %s/" % os.path.basename(archive_name)
]
shutil.copy(archive_name,
os.path.join(build_dir, "SOURCES"))
call_subprocess(cmdline + [spec_name])
cmdline = [
"sed", "-i", "-r",
"-e", "/%doc/s/ man[^ ]+//",
]
call_subprocess(cmdline + [spec_name])
if options.source_only:
rpmbuild_what = "-bs"
else:
rpmbuild_what = "-ba"
if rpmbuild_what:
call_subprocess(
[options.rpmbuild, rpmbuild_what,
"--define", "_topdir %s" % build_dir,
spec_name])
if temp_dir:
shutil.rmtree(temp_dir)
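# In outline (illustrative invocation): "py2rpm --source-only foo-1.0.tar.gz"
# unpacks the archive, runs "setup.py bdist_rpm" to generate a spec under
# <rpm-base>/SPECS, rewrites its Name: (and optionally Epoch:/Source0:) lines
# with sed, and finally calls "rpmbuild -bs" so only a source RPM is produced.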
def main():
parser = create_parser()
options = parser.parse_args()
setup_logging(options)
build_name_map()
build_epoch_map(options)
if options.convert:
rpm_requires, rpm_conflicts = requires_and_conflicts(
options.convert, multiline=True)
if rpm_requires:
print rpm_requires.strip()
if rpm_conflicts:
print rpm_conflicts.strip()
return
install_script_content = """python setup.py install -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES
abspath_installed_files=$(readlink -f INSTALLED_FILES)
(
cd $RPM_BUILD_ROOT
for i in usr/*/python*/site-packages/* usr/bin/*; do
if [ -e "$i" ]; then
sed -i "s@/$i/@DELETE_ME@" "$abspath_installed_files"
echo "/$i"
fi
done
if [ -d usr/man ]; then
rm -rf usr/share/man
mkdir -p usr/share
mv usr/man usr/share/
sed -i "s@/usr/man/@DELETE_ME@" "$abspath_installed_files"
for i in usr/share/man/*; do
echo "/$i/*"
done
fi
) >> GATHERED_FILES
{ sed '/^DELETE_ME/d' INSTALLED_FILES; cat GATHERED_FILES; } | sort -u > INSTALLED_FILES.tmp
mv -f INSTALLED_FILES{.tmp,}
"""
if not options.install_script:
tmp_install_script = tempfile.mkstemp()
options.install_script = tmp_install_script[1]
os.write(tmp_install_script[0], install_script_content)
os.close(tmp_install_script[0])
else:
tmp_install_script = None
options.arch_dependent = set(pkg.lower() for pkg in options.arch_dependent)
failed_pkgs = []
for src in (os.path.abspath(sdir) for sdir in options.sources):
try:
build_rpm(options, src)
except Exception as ex:
failed_pkgs.append((src, ex))
print >> sys.stderr, ex
if tmp_install_script:
os.unlink(tmp_install_script[1])
if failed_pkgs:
print >> sys.stderr, "These packages failed to build:"
for descr in failed_pkgs:
print >> sys.stderr, "%s:\n\t%s" % descr
sys.exit(1)
if __name__ == "__main__":
try:
main()
except Exception as ex:
print >> sys.stderr, ex
sys.exit(1)

smithy

@ -2,8 +2,12 @@
shopt -s nocasematch
SMITHY_NAME=$(readlink -f "$0")
cd "$(dirname "$0")"
YUM_OPTS="--assumeyes --nogpgcheck"
PIP_CMD="pip-python"
PIP_CMD=""
PY2RPM_CMD="$PWD/multipip/py2rpm"
# Source in our variables (or overrides)
source ".anvilrc"
@ -17,17 +21,48 @@ if [ -n "$SUDO_USER" ]; then
fi
fi
PWD=`pwd`
if [ -z "$BOOT_FILES" ]; then
BOOT_FN=".anvil_bootstrapped"
BOOT_FILES="${PWD}/$BOOT_FN"
fi
conflicts() {
echo "Removing conflicting packages $(echo $@)"
yum erase -y $@
}
find_pip()
{
if [ -n "$PIP_CMD" ]; then
return
fi
PIP_CMD=""
for name in pip pip-python; do
if which "$name" &>/dev/null; then
PIP_CMD=$name
break
fi
done
if [ -z "$PIP_CMD" ]; then
echo "pip or pip-python not found"
exit 1
fi
}
rpm_is_installed()
{
local name="$(basename "$1")"
rpm -q "${name%.rpm}" &>/dev/null
}
cache_and_install_rpm_url()
{
url=${1:?"Error: rpm uri is undefined!"}
cachedir=${RPM_CACHEDIR:-'/tmp'}
rpm=$(basename $url)
if rpm_is_installed "$rpm"; then
return
fi
if [ ! -f "$cachedir/$rpm" ]; then
echo "Downloading $rpm to $cachedir..."
curl -s $url -o "$cachedir/$rpm" || return 1
@ -38,24 +73,29 @@ cache_and_install_rpm_url()
install_rpm()
{
rpmstr=${1:?"Error: rpm to install is undefined!"}
rpm=$rpmstr
[ $(dirname $rpm) = '.' ] || rpm=$(rpm -qp $rpmstr 2> /dev/null )
rpm -q $rpm > /dev/null 2>&1 && return 0
echo "Installing rpm requirement '$rpm'"
yum install $YUM_OPTS "$rpmstr" 2>&1
return $?
}
local rpm_path=$1
local py_name=$2
install_pypi()
{
pypi=${1:?"Error: pypi to install is undefined!"}
# TODO: Figure out a way to make pypi installation idempotent --
# in the simple case we can simply return true if the package
# appears in the output of 'pip freeze' but this doesn't handle
# the 'pkg>=1.0' syntax. -I explicitly reinstalls.
$PIP_CMD install -U -I $pypi
return $?
if [ -n "$rpm_path" ]; then
# install or update package
yum install $YUM_OPTS "$rpm_path" && return 0
fi
if [ -z "$py_name" ]; then
return 1
fi
# RPM is not available. Try to build it on the fly
pip_tmp_dir=$(mktemp -d)
find_pip
$PIP_CMD install -U -I $py_name --download "$pip_tmp_dir"
echo "Building RPM for $py_name"
rpm_names=$("$PY2RPM_CMD" "$pip_tmp_dir/"* 2>/dev/null |
awk '/^Wrote: /{ print $2 }' | grep -v '.src.rpm' | sort -u)
rm -rf "$pip_tmp_dir"
if [ -z "$rpm_names" ]; then
echo "No binary RPM was built for $py_name"
return 1
fi
yum install $YUM_OPTS $rpm_names
}
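# Example (names taken from the bootstrap configs): "install_rpm python-keyring keyring"
# first tries "yum install python-keyring"; if that fails, it downloads keyring
# from PyPI, builds it with py2rpm and yum-installs the resulting binary RPM(s).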
bootstrap_epel()
@ -69,30 +109,26 @@ bootstrap_packages()
{
[ -z "$PACKAGES" ] && return 0
for pkg in $PACKAGES; do
format=$(echo $pkg | cut -d: -f1)
name=$(echo $pkg | cut -d: -f2)
echo "Installing $format requirement '$name'"
install_$format $name
if [ $? != 0 ]; then
echo "Error: Installation of $format package '$name' failed!"
return $?
local rpm_name=$(echo $pkg | cut -d: -f1)
local py_name=$(echo $pkg | cut -d: -f2)
install_rpm $rpm_name $py_name
install_status=$?
if [ "$install_status" != 0 ]; then
echo "Error: Installation of package '$rpm_name' failed!"
return "$install_status"
fi
done
}
require()
{
format=${1?"Error: Specify a format as the first arg to require!"}
name=${2?"Error: No name specified for required $format"}
case "$format" in
rpm|pypi)
PACKAGES="$PACKAGES $format:$name"
;;
*)
echo "Error: Smithy does not know how to handle $format requirements!"
local rpm_name=$1
local py_name=$2
if [ -z "$rpm_name" -a -z "$py_name" ]; then
echo "Please specify at RPM or Python package name"
exit 1
;;
esac
fi
PACKAGES="$PACKAGES $rpm_name:$py_name"
}
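# Example usage (as in the bootstrap configs below):
#   require gcc                     # plain RPM requirement
#   require python-cheetah Cheetah  # RPM name plus a PyPI fallback for py2rpm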
needs_bootstrap()
@ -186,13 +222,13 @@ if ! needs_bootstrap; then
run_smithy
elif ! $BOOTSTRAP; then
echo "This system needs to be updated in order to run anvil!" >&2
echo "Running 'sudo smithy --bootstrap' will attempt to do so." >&2
echo "Running 'sudo $SMITHY_NAME --bootstrap' will attempt to do so." >&2
exit 1
fi
## Bootstrap smithy
if [ "$(id -u)" != "0" ]; then
echo "You must run 'smithy --bootstrap' with root privileges!" >&2
echo "You must run '$SMITHY_NAME --bootstrap' with root privileges!" >&2
exit 1
fi
if [ ! -f $BSCONF_FILE ]; then


@ -1,24 +1,6 @@
# -*- sh -*-
## Bootstrap for CentOS Linux 6.x
SHORTNAME=CENTOS
MIN_RELEASE=6.0
STEPS="epel packages"
EPEL_RPM_URL="http://mirrors.kernel.org/fedora-epel/6/i386/epel-release-6-8.noarch.rpm"
## Package Requirements (Order matters!)
require rpm PyYAML
require rpm gcc
require rpm git
require rpm pylint
require rpm python
require rpm python-devel
require rpm python-iso8601
require rpm python-netifaces
require rpm python-ordereddict
require rpm python-pip
require rpm python-progressbar
require rpm python-psutil
require rpm python-iniparse
require rpm patch
require pypi termcolor
require pypi hgtools
require pypi keyring
require pypi Cheetah
source "$BSCONF_DIR/CommonRedHat"


@ -0,0 +1,45 @@
# -*- sh -*-
STEPS="epel packages"
EPEL_RPM_URL="http://mirrors.kernel.org/fedora-epel/6/i386/epel-release-6-8.noarch.rpm"
## Bootstrap for Red Hat based distros
conflicts 'python-paste-deploy1.5
python-nose1.1
python-routes1.12
python-sphinx10
python-webob1.0
Django14'
## Package Requirements (Order matters!)
require PyYAML
require gcc
require git
require patch
require python
require python-devel
require python-argparse
require python-iso8601
require python-netifaces
require python-ordereddict
require python-progressbar
require python-psutil
require python-iniparse
require pylint
require createrepo
# multipip dependencies
require rpm-build
require python-pip
require python-setuptools
# Build dependencies
require sqlite-devel
require mysql-devel
require postgresql-devel
require openldap-devel
require libxml2-devel
require libxslt-devel
# These packages can be built from archives
require python-cheetah Cheetah
require python-keyring keyring
require python-termcolor termcolor


@ -1,22 +1,6 @@
# -*- sh -*-
## Bootstrap OEL 6.3+ for Openstack Anvil
SHORTNAME=OEL
MIN_RELEASE=6.3
STEPS="epel packages"
EPEL_RPM_URL="http://mirrors.kernel.org/fedora-epel/6/i386/epel-release-6-8.noarch.rpm"
## Package Requirements (Order matters!)
require rpm PyYAML
require rpm gcc
require rpm git
require rpm pylint
require rpm python
require rpm python-iso8601
require rpm python-netifaces
require rpm python-ordereddict
require rpm python-pip
require rpm python-progressbar
require rpm python-psutil
require pypi termcolor
require pypi iniparse
require pypi hgtools
require pypi keyring
require pypi Cheetah
source "$BSCONF_DIR/CommonRedHat"


@ -1,22 +1,6 @@
# -*- sh -*-
## Bootstrap for Redhat Enterprise Linux 6.x
SHORTNAME=RHEL
MIN_RELEASE=6.0
STEPS="epel packages"
EPEL_RPM_URL="http://mirrors.kernel.org/fedora-epel/6/i386/epel-release-6-8.noarch.rpm"
## Package Requirements (Order matters!)
require rpm PyYAML
require rpm gcc
require rpm git
require rpm pylint
require rpm python
require rpm python-iso8601
require rpm python-netifaces
require rpm python-ordereddict
require rpm python-pip
require rpm python-progressbar
require rpm python-psutil
require rpm python-iniparse
require pypi termcolor
require pypi hgtools
require pypi keyring
require pypi Cheetah
source "$BSCONF_DIR/CommonRedHat"