Add pycadf + fix oslo.messaging requirement

pycadf pulls in an older version of oslo.messaging, as do
most of the other packages. Since 1.3.0 is released and we
are building it, allow the dependency resolver/adjuster to
correctly adjust those packages to use the specified version
instead of the one found in the requirements files.

Change-Id: I59b607839a8b65274a08a4903cafb98ab8900870
Author: Joshua Harlow, 2014-04-07 15:00:00 -07:00
Commit: 38d0beb73e (parent: 6f47cd92e8)
12 changed files with 151 additions and 26 deletions
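
For context, the "adjust" step boils down to preferring the version of a package that anvil is building locally over whatever the other components' requirements files pin. A minimal sketch of that idea (REQUIREMENTS, FORCED and resolve() are illustrative names only, not anvil's actual API, and the pins are made-up example data):

    # Requirement-file pins (e.g. what pycadf's requirements might ask for).
    REQUIREMENTS = {'oslo.messaging': '>=1.2.0,<1.3.0'}
    # Versions we are building ourselves and therefore force.
    FORCED = {'oslo.messaging': '1.3.0'}

    def resolve(name):
        # Prefer the locally built (forced) version over requirement-file pins.
        if name in FORCED:
            return '%s==%s' % (name, FORCED[name])
        return name + REQUIREMENTS.get(name, '')

    print(resolve('oslo.messaging'))   # -> oslo.messaging==1.3.0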

anvil/decorators.py (new file, 81 lines)

@@ -0,0 +1,81 @@
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import time

# Various useful decorators...
#
# Copyright 2011 Christopher Arndt, MIT License
#
# https://wiki.python.org/moin/PythonDecoratorLibrary#Cached_Properties
# pylint: disable=C0103
class cached_property(object):
    '''Decorator for read-only properties evaluated only once within TTL period.

    It can be used to create a cached property like this::

        import random

        # the class containing the property must be a new-style class
        class MyClass(object):
            # create property whose value is cached for ten minutes
            @cached_property(ttl=600)
            def randint(self):
                # will only be evaluated every 10 min. at maximum.
                return random.randint(0, 100)

    The value is cached in the '_cache' attribute of the object instance that
    has the property getter method wrapped by this decorator. The '_cache'
    attribute value is a dictionary which has a key for every property of the
    object which is wrapped by this decorator. Each entry in the cache is
    created only when the property is accessed for the first time and is a
    two-element tuple with the last computed property value and the last time
    it was updated in seconds since the epoch.

    The default time-to-live (TTL) is 300 seconds (5 minutes). Set the TTL to
    zero for the cached value to never expire.

    To expire a cached property value manually just do::

        del instance._cache[<property name>]
    '''

    def __init__(self, ttl=300):
        self.ttl = ttl
        self.fget = None

    def __call__(self, fget, doc=None):
        self.fget = fget
        self.__doc__ = doc or fget.__doc__  # pylint: disable=W0201
        self.__name__ = fget.__name__  # pylint: disable=W0201
        self.__module__ = fget.__module__  # pylint: disable=W0201
        return self

    def __get__(self, inst, owner):
        now = time.time()
        try:
            value, last_update = inst._cache[self.__name__]
            if self.ttl > 0 and now - last_update > self.ttl:
                raise AttributeError
        except (KeyError, AttributeError):
            value = self.fget(inst)
            try:
                cache = inst._cache
            except AttributeError:
                cache = inst._cache = {}
            cache[self.__name__] = (value, now)
        return value
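
A quick usage sketch (not part of the commit) exercising the decorator above; it assumes anvil is installed so that this file is importable as anvil.decorators:

    import random
    import time

    from anvil import decorators

    class Thing(object):
        @decorators.cached_property(ttl=2)
        def randint(self):
            # Recomputed only after the 2 second TTL expires.
            return random.randint(0, 100)

    t = Thing()
    first = t.randint
    assert t.randint == first        # second access comes from t._cache
    time.sleep(2.1)
    t.randint                        # TTL expired, the getter runs again
    del t._cache['randint']          # manual invalidation, as the docstring notes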


@@ -21,6 +21,7 @@
 import collections

 from anvil import colorizer
+from anvil import decorators
 from anvil import exceptions as exc
 from anvil import log as logging
 from anvil.packaging.helpers import pip_helper
@@ -75,8 +76,6 @@ class DependencyHandler(object):
         self.forced_packages = []
         # Instances to there app directory (with a setup.py inside)
         self.package_dirs = self._get_package_dirs(instances)
-        # Instantiate this as late as we can.
-        self._python_names = None
         # Track what file we create so they can be cleaned up on uninstall.
         trace_fn = tr.trace_filename(self.root_dir, 'deps')
         self.tracewriter = tr.TraceWriter(trace_fn, break_if_there=False)
@@ -95,17 +94,19 @@ class DependencyHandler(object):
         else:
             self.ignore_pips = set(ignore_pips)

+    @decorators.cached_property(ttl=0)
+    def _python_eggs(self):
+        egg_infos = []
+        for i in self.instances:
+            try:
+                egg_infos.append(dict(i.egg_info))
+            except AttributeError:
+                pass
+        return egg_infos
+
     @property
     def python_names(self):
-        if self._python_names is None:
-            names = []
-            for i in self.instances:
-                try:
-                    names.append(i.egg_info['name'])
-                except AttributeError:
-                    pass
-            self._python_names = names
-        return self._python_names
+        return [e['name'] for e in self._python_eggs]

     @staticmethod
     def _get_package_dirs(instances):
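
The hunk above replaces the lazily initialized _python_names attribute with the new cached property (ttl=0 means the cached value never expires), and python_names becomes a simple view over the cached egg metadata. A rough, self-contained sketch of the pattern (FakeInstance and Handler are hypothetical stand-ins, not anvil's real classes):

    from anvil import decorators

    class FakeInstance(object):
        def __init__(self, name):
            self.egg_info = {'name': name}

    class Handler(object):
        def __init__(self, instances):
            self.instances = instances

        @decorators.cached_property(ttl=0)    # ttl=0 -> computed once, cached forever
        def _python_eggs(self):
            egg_infos = []
            for i in self.instances:
                try:
                    egg_infos.append(dict(i.egg_info))
                except AttributeError:
                    pass                      # instances without egg metadata are skipped
            return egg_infos

        @property
        def python_names(self):
            return [e['name'] for e in self._python_eggs]

    h = Handler([FakeInstance('pycadf'), object()])
    print(h.python_names)                     # ['pycadf']
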
@@ -117,19 +118,24 @@ class DependencyHandler(object):
         return package_dirs

     def package_start(self):
+        create_requirement = pip_helper.create_requirement
+
+        def gather_extras(instance):
+            pips = []
+            for p in instance.get_option("pips", default_value=[]):
+                req = create_requirement(p['name'], p.get('version'))
+                pips.append(str(req))
+            requires_files = list(getattr(instance, 'requires_files', []))
+            if instance.get_bool_option('use_tests_requires', default_value=True):
+                requires_files.extend(getattr(instance, 'test_requires_files', []))
+            return (pips, requires_files)
+
         requires_files = []
         extra_pips = []
         for i in self.instances:
-            requires_files.extend(getattr(i, 'requires_files', ()))
-            if i.get_bool_option('use_tests_requires', default_value=True):
-                requires_files.extend(getattr(i, 'test_requires_files', ()))
-            # Ensure we include any extra pips that are desired.
-            i_extra_pips = i.get_option("pips") or []
-            for i_pip in i_extra_pips:
-                extra_req = pip_helper.create_requirement(i_pip['name'],
-                                                          i_pip.get('version'))
-                extra_pips.append(str(extra_req))
+            instance_pips, instance_requires_files = gather_extras(i)
+            extra_pips.extend(instance_pips)
+            requires_files.extend(instance_requires_files)
         requires_files = filter(sh.isfile, requires_files)
         self._gather_pips_to_install(requires_files, sorted(set(extra_pips)))
         self._clean_pip_requires(requires_files)
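
The new gather_extras() closure collects, per component, both the extra pips declared in its options and its (test) requirements files. A sketch of what it returns for a single hypothetical instance (the create_requirement stand-in below just formats a pin; anvil's pip_helper.create_requirement returns a requirement object):

    def create_requirement(name, version=None):
        # Stand-in for pip_helper.create_requirement: just format a pin.
        return name if not version else '%s==%s' % (name, version)

    class FakeInstance(object):
        requires_files = ['requirements.txt']
        test_requires_files = ['test-requirements.txt']

        def get_option(self, name, default_value=None):
            return [{'name': 'oslo.messaging', 'version': '1.3.0'}]

        def get_bool_option(self, name, default_value=True):
            return True

    def gather_extras(instance):
        pips = []
        for p in instance.get_option("pips", default_value=[]):
            pips.append(str(create_requirement(p['name'], p.get('version'))))
        requires_files = list(getattr(instance, 'requires_files', []))
        if instance.get_bool_option('use_tests_requires', default_value=True):
            requires_files.extend(getattr(instance, 'test_requires_files', []))
        return (pips, requires_files)

    print(gather_extras(FakeInstance()))
    # (['oslo.messaging==1.3.0'], ['requirements.txt', 'test-requirements.txt'])
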
@@ -192,6 +198,10 @@ class DependencyHandler(object):
         Updates `self.forced_packages` and `self.pips_to_install`.
         Writes requirements to `self.gathered_requires_filename`.
         """
+
+        def sort_req(r1, r2):
+            return cmp(r1.key, r2.key)
+
         extra_pips = extra_pips or []
         cmdline = [self.multipip_executable]
         cmdline = cmdline + extra_pips + ["-r"] + requires_files
@@ -205,7 +215,9 @@ class DependencyHandler(object):
         stdout, stderr = sh.execute(cmdline, check_exit_code=False)
         self.pips_to_install = list(utils.splitlines_not_empty(stdout))
         sh.write_file(self.gathered_requires_filename, "\n".join(self.pips_to_install))
-        utils.log_iterable(sorted(self.pips_to_install), logger=LOG,
+        pips_to_install = pip_helper.read_requirement_files([self.gathered_requires_filename])
+        pips_to_install = sorted(pips_to_install, cmp=sort_req)
+        utils.log_iterable(pips_to_install, logger=LOG,
                            header="Full known python dependency list")

         incompatibles = collections.defaultdict(list)
@@ -234,12 +246,17 @@ class DependencyHandler(object):
         # Translate those that we altered requirements for into a set of forced
         # requirements file (and associated list).
-        self.forced_packages = []
+        self.forced_packages = [e['req'] for e in self._python_eggs]
+        forced_packages_keys = [e.key for e in self.forced_packages]
         for req in [pip_helper.extract_requirement(line) for line in self.pips_to_install]:
-            if req.key in incompatibles:
+            if req.key in incompatibles and req.key not in forced_packages_keys:
                 self.forced_packages.append(req)
-        sh.write_file(self.forced_requires_filename,
-                      "\n".join([str(req) for req in self.forced_packages]))
+                forced_packages_keys.append(req.key)
+        self.forced_packages = sorted(self.forced_packages, cmp=sort_req)
+        forced_packages = [str(req) for req in self.forced_packages]
+        utils.log_iterable(forced_packages, logger=LOG,
+                           header="Forced python dependencies")
+        sh.write_file(self.forced_requires_filename, "\n".join(forced_packages))

     def _filter_download_requires(self):
         """Shrinks the pips that were downloaded into a smaller set.


@@ -0,0 +1,4 @@
# Settings for component pycadf
---

...


@@ -301,6 +301,13 @@ components:
             test: anvil.components.base_testing:PythonTestingComponent
             coverage: anvil.components.base_testing:PythonTestingComponent
             uninstall: anvil.components.base_install:PkgUninstallComponent
+    pycadf:
+        action_classes:
+            install: anvil.components.base_install:PythonInstallComponent
+            running: anvil.components.base_runtime:EmptyRuntime
+            test: anvil.components.base_testing:PythonTestingComponent
+            coverage: anvil.components.base_testing:PythonTestingComponent
+            uninstall: anvil.components.base_install:PkgUninstallComponent
     oslo-messaging:
         action_classes:
             install: anvil.components.base_install:PythonInstallComponent


@@ -69,3 +69,5 @@ trove:
     tag: 2013.2.1
 oslo-messaging:
     disabled: True
+pycadf:
+    disabled: True


@@ -69,3 +69,5 @@ trove:
     tag: 2013.2.2
 oslo-messaging:
     disabled: True
+pycadf:
+    disabled: True


@@ -69,3 +69,5 @@ trove:
     tag: 2013.2
 oslo-messaging:
     disabled: True
+pycadf:
+    disabled: True


@@ -72,3 +72,5 @@ trove:
     branch: stable/havana
 oslo-messaging:
     disabled: True
+pycadf:
+    disabled: True


@@ -70,3 +70,6 @@ trove:
 oslo-messaging:
     repo: git://github.com/openstack/oslo.messaging.git
     tag: 1.3.0
+pycadf:
+    repo: git://github.com/openstack/pycadf.git
+    tag: 0.5


@@ -73,3 +73,6 @@ trove-client:
 trove:
     repo: git://github.com/openstack/trove.git
     branch: master
+pycadf:
+    repo: git://github.com/openstack/pycadf.git
+    tag: master


@@ -7,6 +7,7 @@ components:
 - rabbit-mq
 - oslo-config
 - oslo-messaging
+- pycadf
 - keystone
 - keystone-client
 - glance


@@ -6,6 +6,7 @@ components:
 - rabbit-mq
 - oslo-config
 - oslo-messaging
+- pycadf
 - keystone
 # Client used by many components
 - keystone-client