Begin building virtualenvs for each component

To get this to work, you'll need to install the following packages before running:

sudo yum install libxslt-devel libxml2-devel mysql-devel \
                 postgresql-devel openldap-devel psmisc \
                 sqlite-devel libvirt-devel

To have anvil build virtualenvs, apply the jsonpatch file anvil includes
in conf/distros to the distro file using the --distros-patch option:

sudo ./smithy --action=prepare --distros-patch=conf/distros/venv-patch.json

A separate virtualenv will be built for every component listed in the
persona file.  Requirements are not shared across virtualenvs.

o add --venv-deploy-dir option to make the virtualenvs relocatable
  to a path different from the build path
o add --origins-patch to apply the jsonpatch file anvil includes
  in conf/origins to the origins file in order to skip bundling
  test requirements into the virtualenvs (see the example
  invocation below)
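
For example, an invocation that builds relocatable virtualenvs without
test requirements could look like the following (illustrative only: the
origins patch filename and the deploy directory are placeholders, added
to the same kind of smithy invocation shown above):

sudo ./smithy --action=prepare \
              --distros-patch=conf/distros/venv-patch.json \
              --origins-patch=conf/origins/<origins-patch>.json \
              --venv-deploy-dir=/opt/openstack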

Change-Id: I47fdb68e71c3114f9cf441b3b33be9c7591356aa
Joshua Harlow 2014-10-06 17:29:04 -07:00
parent b7719ce924
commit d435f54827
13 changed files with 276 additions and 13 deletions


@@ -96,7 +96,8 @@ def run(args):
     ensure_anvil_dirs(root_dir)
     # Load the distro/s
-    possible_distros = distro.load(settings.DISTRO_DIR)
+    possible_distros = distro.load(settings.DISTRO_DIR,
+                                   distros_patch=args.get('distros_patch'))
     # Load + match the persona to the possible distros...
     try:
@@ -104,7 +105,8 @@ def run(args):
     except Exception as e:
         raise excp.OptionException("Error loading persona file: %s due to %s" % (persona_fn, e))
     else:
-        dist = persona_obj.match(possible_distros, args['origins_fn'])
+        dist = persona_obj.match(possible_distros, args['origins_fn'],
+                                 origins_patch=args.get('origins_patch'))
         LOG.info('Persona selected distro: %s from %s possible distros',
                  colorizer.quote(dist.name), len(possible_distros))


@@ -192,7 +192,9 @@ class Action(object):
             sibling_params['siblings'] = {}  # This gets adjusted during construction
             sibling_params['passwords'] = self.passwords
             sibling_params['distro'] = self.distro
-            sibling_params['options'] = self.config_loader.load(d_component, c)
+            sibling_params['options'] = self.config_loader.load(
+                distro=d_component, component=c,
+                origins_patch=self.cli_opts.get('origins_patch'))
             LOG.debug("Constructing %r %s siblings...", c, len(d_component.siblings))
             my_siblings = self._construct_siblings(c, d_component.siblings, sibling_params, sibling_instances)
@@ -201,8 +203,9 @@ class Action(object):
             # siblings get...
             instance_params = dict(sibling_params)
             instance_params['instances'] = instances
-            instance_params['options'] = self.config_loader.load(d_component, c,
-                                                                 persona)
+            instance_params['options'] = self.config_loader.load(
+                distro=d_component, component=c, persona=persona,
+                origins_patch=self.cli_opts.get('origins_patch'))
             instance_params['siblings'] = my_siblings
             instance_params = utils.merge_dicts(instance_params, self.cli_opts, preserve=True)
             instances[c] = importer.construct_entry_point(d_component.entry_point, **instance_params)


@@ -14,6 +14,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
+import jsonpatch
 import re
 from anvil import exceptions
@@ -66,7 +67,7 @@ class YamlMergeLoader(object):
         persona_specific = persona.component_options.get(component, {})
         self._base_loader.update_cache(conf, persona_specific)
-    def load(self, distro, component, persona=None):
+    def load(self, distro, component, persona=None, origins_patch=None):
         # NOTE (vnovikov): applying takes place before loading reference links
         self._apply_persona(component, persona)
@@ -75,7 +76,11 @@ class YamlMergeLoader(object):
         origins_opts = {}
         if self._origins_path:
             try:
-                origins_opts = utils.load_yaml(self._origins_path)[component]
+                origins = utils.load_yaml(self._origins_path)
+                if origins_patch:
+                    patch = jsonpatch.JsonPatch(origins_patch)
+                    patch.apply(origins, in_place=True)
+                origins_opts = origins[component]
             except KeyError:
                 pass
         general_component_opts = self._base_loader.load('general')


@@ -18,6 +18,7 @@
 import collections
 import copy
 import glob
+import jsonpatch
 import os
 import platform
 import re
@@ -146,8 +147,14 @@ def _match_distros(distros):
     return matches
-def load(path):
+def load(path, distros_patch=None):
+    """Load configuration for all distros found in path.
+
+    :param path: path containing distro configuration in yaml format
+    :param distros_patch: distros file patch, jsonpath format (rfc6902)
+    """
     distro_possibles = []
+    patch = jsonpatch.JsonPatch(distros_patch) if distros_patch else None
     input_files = glob.glob(sh.joinpths(path, '*.yaml'))
     if not input_files:
         raise excp.ConfigException('Did not find any distro definition files in %r' % path)
@@ -155,6 +162,9 @@ def load(path):
         LOG.debug("Attempting to load distro definition from %r", fn)
         try:
             cls_kvs = utils.load_yaml(fn)
+            # Apply any user specified patches to distros file
+            if patch:
+                patch.apply(cls_kvs, in_place=True)
         except Exception as err:
             LOG.warning('Could not load distro definition from %r: %s', fn, err)
         else:
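
As a rough sketch (not part of the commit) of the patching step that
distro.load() and Persona.match() now perform: the JSON Patch (RFC 6902)
operation list is applied in place to the dict loaded from YAML. The dict
below is a trimmed, hypothetical distro definition; the operations mirror
conf/distros/venv-patch.json shown later in this diff.

import jsonpatch

# Hypothetical slice of a distro definition as loaded from its YAML file.
distro_def = {
    "install_helper": "anvil.packaging.yum:YumInstallHelper",
    "dependency_handler": {"name": "anvil.packaging.yum:YumDependencyHandler"},
}
patch = jsonpatch.JsonPatch([
    {"op": "replace", "path": "/install_helper",
     "value": "anvil.packaging.venv:VenvInstallHelper"},
    {"op": "replace", "path": "/dependency_handler",
     "value": {"name": "anvil.packaging.venv:VenvDependencyHandler"}},
])
patch.apply(distro_def, in_place=True)
# distro_def now routes packaging through the venv helpers instead of yum.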


@@ -16,6 +16,7 @@
 from StringIO import StringIO
+import json
 import multiprocessing
 import textwrap
@@ -165,6 +166,20 @@ def parse(previous_settings=None):
                          metavar="FILE",
                          help="yaml file describing where to get openstack sources "
                               "from (default: %default)")
+    base_group.add_option("--origins-patch",
+                          action="store",
+                          type="string",
+                          dest="origins_patch_fn",
+                          default=None,
+                          metavar="FILE",
+                          help="origins file patch, jsonpath format (rfc6902)")
+    base_group.add_option("--distros-patch",
+                          action="store",
+                          type="string",
+                          dest="distros_patch_fn",
+                          default=None,
+                          metavar="FILE",
+                          help="distros file patch, jsonpath format (rfc6902)")
     base_group.add_option("-j", "--jobs",
                           action="store",
                           type="int",
@@ -201,6 +216,14 @@
                           default=False,
                           help=("when packaging only store /usr directory"
                                 " (default: %default)"))
+    build_group.add_option("--venv-deploy-dir",
+                           action="store",
+                           type="string",
+                           dest="venv_deploy_dir",
+                           default=None,
+                           help=("for virtualenv builds, make the virtualenv "
+                                 "relocatable to a directory different from "
+                                 "build directory"))
     parser.add_option_group(build_group)
     test_group = OptionGroup(parser, "Test specific options")
@@ -227,6 +250,13 @@
     values['origins_fn'] = options.origins_fn
     values['verbose'] = options.verbose
     values['usr_only'] = options.usr_only
+    if options.origins_patch_fn:
+        with open(options.origins_patch_fn) as fp:
+            values['origins_patch'] = json.load(fp)
+    if options.distros_patch_fn:
+        with open(options.distros_patch_fn) as fp:
+            values['distros_patch'] = json.load(fp)
+    values['venv_deploy_dir'] = options.venv_deploy_dir
     values['prompt_for_passwords'] = options.prompt_for_passwords
     values['show_amount'] = max(0, options.show_amount)
     values['store_passwords'] = options.store_passwords

anvil/packaging/venv.py (new file, 167 lines)

@@ -0,0 +1,167 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import contextlib
import itertools
import os
import tarfile

import six

from anvil import colorizer
from anvil import env
from anvil import log as logging
from anvil import shell as sh
from anvil import utils

from anvil.packaging import base
from anvil.packaging.helpers import pip_helper

LOG = logging.getLogger(__name__)


# TODO(harlowja): think we can remove this...
class VenvInstallHelper(base.InstallHelper):
    def pre_install(self, pkg, params=None):
        pass

    def post_install(self, pkg, params=None):
        pass


class VenvDependencyHandler(base.DependencyHandler):
    # PBR seems needed everywhere...
    _PREQ_PKGS = frozenset(['pbr'])

    def __init__(self, distro, root_dir, instances, opts):
        super(VenvDependencyHandler, self).__init__(distro, root_dir,
                                                    instances, opts)
        self.cache_dir = sh.joinpths(self.root_dir, "pip-cache")

    def _venv_directory_for(self, instance):
        return sh.joinpths(instance.get_option('component_dir'), 'venv')

    def _install_into_venv(self, instance, requirements):
        venv_dir = self._venv_directory_for(instance)
        base_pip = [sh.joinpths(venv_dir, 'bin', 'pip')]
        env_overrides = {
            'PATH': os.pathsep.join([sh.joinpths(venv_dir, "bin"),
                                     env.get_key('PATH', default_value='')]),
            'VIRTUAL_ENV': venv_dir,
        }
        sh.mkdirslist(self.cache_dir, tracewriter=self.tracewriter)

        def try_install(attempt, requirements):
            cmd = list(base_pip) + ['install']
            cmd.extend([
                '--download-cache',
                self.cache_dir,
            ])
            if isinstance(requirements, six.string_types):
                cmd.extend([
                    '--requirement',
                    requirements
                ])
            else:
                for req in requirements:
                    cmd.append(str(req))
            sh.execute(cmd, env_overrides=env_overrides)

        # Sometimes pip fails downloading things, retry it when this happens...
        utils.retry(3, 5, try_install, requirements=requirements)

    def _is_buildable(self, instance):
        app_dir = instance.get_option('app_dir')
        if app_dir and sh.isdir(app_dir) and hasattr(instance, 'egg_info'):
            return True
        return False

    def _replace_deployment_paths(self, root_dirn, pattstr):
        for root, _, filenames in os.walk(root_dirn):
            for fn in filenames:
                cmd = ['sed', '--in-place', pattstr, os.path.join(root, fn)]
                sh.execute(cmd=cmd, shell=False)

    def package_finish(self):
        super(VenvDependencyHandler, self).package_finish()
        for instance in self.instances:
            if not self._is_buildable(instance):
                continue
            venv_dir = sh.abspth(self._venv_directory_for(instance))

            # Replace paths with virtualenv deployment directory.
            if self.opts.get('venv_deploy_dir'):
                deploy_dir = os.path.join(self.opts.get('venv_deploy_dir'),
                                          instance.name)
                pattstr = ('s#{searchstr}#{replacestr}#g'.format(
                    searchstr=instance.get_option('component_dir'),
                    replacestr=deploy_dir))
                bin_dir = os.path.join(venv_dir, 'bin')
                self._replace_deployment_paths(bin_dir, pattstr)

            # Create a tarball containing the virtualenv.
            tar_filename = sh.joinpths(venv_dir, '%s-venv.tar.gz' % instance.name)
            LOG.info("Making tarball of %s built for %s at %s", venv_dir,
                     instance.name, tar_filename)
            with contextlib.closing(tarfile.open(tar_filename, "w:gz")) as tfh:
                for path in sh.listdir(venv_dir, recursive=True):
                    tfh.add(path, recursive=False, arcname=path[len(venv_dir):])

    def package_start(self):
        super(VenvDependencyHandler, self).package_start()
        for instance in self.instances:
            if not self._is_buildable(instance):
                continue
            # Create a virtualenv...
            venv_dir = self._venv_directory_for(instance)
            sh.mkdirslist(venv_dir, tracewriter=self.tracewriter)
            cmd = ['virtualenv', '--clear', venv_dir]
            LOG.info("Creating virtualenv at %s", colorizer.quote(venv_dir))
            sh.execute(cmd)
            if self._PREQ_PKGS:
                self._install_into_venv(instance, self._PREQ_PKGS)

    def package_instance(self, instance):
        # Skip things that aren't python...
        if self._is_buildable(instance):
            requires_what = self._filter_download_requires()
            requires_keys = set()
            for req in requires_what:
                if isinstance(req, six.string_types):
                    req = pip_helper.extract_requirement(req)
                requires_keys.add(req.key)
            egg_info = getattr(instance, 'egg_info', None)
            if egg_info is not None:
                # Ensure we have gotten all the things...
                test_dependencies = (egg_info.get('test_dependencies', [])
                                     if instance.get_bool_option(
                                         'use_tests_requires', default_value=True)
                                     else [])
                for req in itertools.chain(egg_info.get('dependencies', []),
                                           test_dependencies):
                    if isinstance(req, six.string_types):
                        req = pip_helper.extract_requirement(req)
                    if req.key not in requires_keys:
                        requires_what.append(req)
                        requires_keys.add(req.key)
            self._install_into_venv(instance, requires_what)
            self._install_into_venv(instance, [instance.get_option('app_dir')])
        else:
            LOG.warn("Skipping building %s (not python)",
                     colorizer.quote(instance.name, quote_color='red'))

    def download_dependencies(self):
        pass
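
A rough illustration (not from the commit) of the relocation that
--venv-deploy-dir enables through _replace_deployment_paths(): every file
under the virtualenv's bin/ directory has the build-time component
directory rewritten (the handler shells out to sed 's#old#new#g'), so
shebangs and activate scripts still point at valid paths after the
tarball is unpacked at the deployment location. The paths below are
hypothetical.

import re

build_component_dir = "/home/builder/openstack/nova"  # build-time component_dir (assumed)
deploy_dir = "/opt/openstack/nova"  # <venv-deploy-dir>/<component name> (assumed)

shebang = "#!/home/builder/openstack/nova/venv/bin/python"
relocated = re.sub(re.escape(build_component_dir), deploy_dir, shebang)
print(relocated)  # -> #!/opt/openstack/nova/venv/bin/python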


@@ -473,8 +473,8 @@ class YumDependencyHandler(base.DependencyHandler):
             else:
                 yum_provided.append((req, rpm_info))
                 LOG.info(("Dependency %s was downloaded additionally "
-                         "but it can be satisfied by %s from repository "
-                         "%s instead."), colorizer.quote(req),
+                          "but it can be satisfied by %s from repository "
+                          "%s instead."), colorizer.quote(req),
                          colorizer.quote(rpm_name),
                          colorizer.quote(rpm_info['repo']))


@@ -14,6 +14,8 @@
 # License for the specific language governing permissions and limitations
 # under the License.
+import jsonpatch
+
 import six
 from anvil import colorizer
@@ -33,9 +35,13 @@ class Persona(object):
         self.component_options = kargs.get('options') or {}
         self.no_origins = kargs.get('no-origin') or []
-    def match(self, distros, origins_fn):
+    def match(self, distros, origins_fn, origins_patch=None):
         # Filter out components that are disabled in origins file
         origins = utils.load_yaml(origins_fn)
+        # Apply any user specified patches to origins file
+        if origins_patch:
+            patch = jsonpatch.JsonPatch(origins_patch)
+            patch.apply(origins, in_place=True)
         for c in self.wanted_components:
             if c not in origins:
                 if c in self.no_origins:


@@ -122,6 +122,7 @@ def execute(cmd,
     if env_overrides and len(env_overrides):
         process_env = env.get()
         for k, v in env_overrides.items():
+            LOG.debug("Using environment override '%s' => '%s'", k, v)
             process_env[k] = str(v)
     # Run command process.


@@ -121,8 +121,7 @@ def expand_template(contents, params):
     tpl = Template(source=str(contents),
                    searchList=[params],
                    compilerSettings={
-                       'useErrorCatcher': True,
-                   })
+                       'useErrorCatcher': True})
     return tpl.respond()

conf/distros/venv-patch.json (new file)

@@ -0,0 +1,12 @@
[
{
"path": "/install_helper",
"value": "anvil.packaging.venv:VenvInstallHelper",
"op": "replace"
},
{
"path": "/dependency_handler",
"value": { "name": "anvil.packaging.venv:VenvDependencyHandler" },
"op": "replace"
}
]

New conf/origins patch (new file; filename not shown in this view) that
disables use_tests_requires for each component, so test requirements are
not bundled into the virtualenvs:

@@ -0,0 +1,27 @@
[
{ "path": "/ceilometer/use_tests_requires", "value": false, "op": "add" },
{ "path": "/ceilometer-client/use_tests_requires", "value": false, "op": "add" },
{ "path": "/cinder-client/use_tests_requires", "value": false, "op": "add" },
{ "path": "/cinder/use_tests_requires", "value": false, "op": "add" },
{ "path": "/django-openstack-auth/use_tests_requires", "value": false, "op": "add" },
{ "path": "/glance-client/use_tests_requires", "value": false, "op": "add" },
{ "path": "/glance/use_tests_requires", "value": false, "op": "add" },
{ "path": "/heat/use_tests_requires", "value": false, "op": "add" },
{ "path": "/heat-client/use_tests_requires", "value": false, "op": "add" },
{ "path": "/horizon/use_tests_requires", "value": false, "op": "add" },
{ "path": "/keystone-client/use_tests_requires", "value": false, "op": "add" },
{ "path": "/keystone/use_tests_requires", "value": false, "op": "add" },
{ "path": "/nova-client/use_tests_requires", "value": false, "op": "add" },
{ "path": "/nova/use_tests_requires", "value": false, "op": "add" },
{ "path": "/novnc/use_tests_requires", "value": false, "op": "add" },
{ "path": "/openstack-client/use_tests_requires", "value": false, "op": "add" },
{ "path": "/oslo-config/use_tests_requires", "value": false, "op": "add" },
{ "path": "/oslo-incubator/use_tests_requires", "value": false, "op": "add" },
{ "path": "/neutron-client/use_tests_requires", "value": false, "op": "add" },
{ "path": "/neutron/use_tests_requires", "value": false, "op": "add" },
{ "path": "/swift-client/use_tests_requires", "value": false, "op": "add" },
{ "path": "/trove-client/use_tests_requires", "value": false, "op": "add" },
{ "path": "/trove/use_tests_requires", "value": false, "op": "add" },
{ "path": "/oslo-messaging/use_tests_requires", "value": false, "op": "add" },
{ "path": "/pycadf/use_tests_requires", "value": false, "op": "add" }
]


@@ -10,3 +10,4 @@ PyYAML>=3.1.0
 six>=1.4.1
 termcolor
 argparse
+jsonpatch>=1.1