Perform oslo-incubator code sync.

run_tests.sh cannot install a virtualenv with tools/install_venv.py because
tools/install_venv_common.py depends on oslo.config, which is an external
dependency. The virtualenv installer should not depend on any external
dependencies. Fix this by performing a code sync with oslo-incubator.

Note this adds a new dependency on six and changes the way versioning is
determined.

Change-Id: Ifb67bbfeec4eabc943c2a88a34fe535c57f174a1
commit 548b52336f
parent 8f80678677
Author: Clark Boylan
Date:   2013-05-08 11:32:26 -07:00

7 changed files with 234 additions and 218 deletions

MANIFEST.in

@@ -3,7 +3,6 @@ include AUTHORS HACKING LICENSE
 include ChangeLog
 include run_tests.sh tox.ini
 include python-keystoneclient
-include keystoneclient/versioninfo
 recursive-include doc *
 recursive-include tests *
 recursive-include tools *

keystoneclient/openstack/common/jsonutils.py

@@ -34,15 +34,29 @@ This module provides a few things:
 import datetime
+import functools
 import inspect
 import itertools
 import json
+import types
 import xmlrpclib
+import six
 from keystoneclient.openstack.common import timeutils
-def to_primitive(value, convert_instances=False, level=0):
+_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
+                     inspect.isfunction, inspect.isgeneratorfunction,
+                     inspect.isgenerator, inspect.istraceback, inspect.isframe,
+                     inspect.iscode, inspect.isbuiltin, inspect.isroutine,
+                     inspect.isabstract]
+_simple_types = (types.NoneType, int, basestring, bool, float, long)
+def to_primitive(value, convert_instances=False, convert_datetime=True,
+                 level=0, max_depth=3):
     """Convert a complex object into primitives.
     Handy for JSON serialization. We can optionally handle instances,
@@ -56,19 +70,32 @@ def to_primitive(value, convert_instances=False, level=0):
     Therefore, convert_instances=True is lossy ... be aware.
     """
-    nasty = [inspect.ismodule, inspect.isclass, inspect.ismethod,
-             inspect.isfunction, inspect.isgeneratorfunction,
-             inspect.isgenerator, inspect.istraceback, inspect.isframe,
-             inspect.iscode, inspect.isbuiltin, inspect.isroutine,
-             inspect.isabstract]
-    for test in nasty:
-        if test(value):
-            return unicode(value)
-    # value of itertools.count doesn't get caught by inspects
-    # above and results in infinite loop when list(value) is called.
+    # handle obvious types first - order of basic types determined by running
+    # full tests on nova project, resulting in the following counts:
+    # 572754 <type 'NoneType'>
+    # 460353 <type 'int'>
+    # 379632 <type 'unicode'>
+    # 274610 <type 'str'>
+    # 199918 <type 'dict'>
+    # 114200 <type 'datetime.datetime'>
+    # 51817 <type 'bool'>
+    # 26164 <type 'list'>
+    # 6491 <type 'float'>
+    # 283 <type 'tuple'>
+    # 19 <type 'long'>
+    if isinstance(value, _simple_types):
+        return value
+    if isinstance(value, datetime.datetime):
+        if convert_datetime:
+            return timeutils.strtime(value)
+        else:
+            return value
+    # value of itertools.count doesn't get caught by nasty_type_tests
+    # and results in infinite loop when list(value) is called.
     if type(value) == itertools.count:
-        return unicode(value)
+        return six.text_type(value)
     # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
     #              tests that raise an exception in a mocked method that
@@ -78,52 +105,46 @@ def to_primitive(value, convert_instances=False, level=0):
     if getattr(value, '__module__', None) == 'mox':
         return 'mock'
-    if level > 3:
+    if level > max_depth:
         return '?'
     # The try block may not be necessary after the class check above,
     # but just in case ...
     try:
+        recursive = functools.partial(to_primitive,
+                                      convert_instances=convert_instances,
+                                      convert_datetime=convert_datetime,
+                                      level=level,
+                                      max_depth=max_depth)
+        if isinstance(value, dict):
+            return dict((k, recursive(v)) for k, v in value.iteritems())
+        elif isinstance(value, (list, tuple)):
+            return [recursive(lv) for lv in value]
         # It's not clear why xmlrpclib created their own DateTime type, but
         # for our purposes, make it a datetime type which is explicitly
         # handled
         if isinstance(value, xmlrpclib.DateTime):
             value = datetime.datetime(*tuple(value.timetuple())[:6])
-        if isinstance(value, (list, tuple)):
-            o = []
-            for v in value:
-                o.append(to_primitive(v, convert_instances=convert_instances,
-                                      level=level))
-            return o
-        elif isinstance(value, dict):
-            o = {}
-            for k, v in value.iteritems():
-                o[k] = to_primitive(v, convert_instances=convert_instances,
-                                    level=level)
-            return o
-        elif isinstance(value, datetime.datetime):
+        if convert_datetime and isinstance(value, datetime.datetime):
             return timeutils.strtime(value)
         elif hasattr(value, 'iteritems'):
-            return to_primitive(dict(value.iteritems()),
-                                convert_instances=convert_instances,
-                                level=level + 1)
+            return recursive(dict(value.iteritems()), level=level + 1)
         elif hasattr(value, '__iter__'):
-            return to_primitive(list(value),
-                                convert_instances=convert_instances,
-                                level=level)
+            return recursive(list(value))
         elif convert_instances and hasattr(value, '__dict__'):
             # Likely an instance of something. Watch for cycles.
             # Ignore class member vars.
-            return to_primitive(value.__dict__,
-                                convert_instances=convert_instances,
-                                level=level + 1)
+            return recursive(value.__dict__, level=level + 1)
         else:
+            if any(test(value) for test in _nasty_type_tests):
+                return six.text_type(value)
             return value
-    except TypeError, e:
+    except TypeError:
         # Class objects are tricky since they may define something like
         # __iter__ defined but it isn't callable as list().
-        return unicode(value)
+        return six.text_type(value)
 def dumps(value, default=to_primitive, **kwargs):
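
A minimal usage sketch of the resynced serialization helpers (Python 2; assumes the module is importable as keystoneclient.openstack.common.jsonutils, and the payload is purely illustrative):

from datetime import datetime

from keystoneclient.openstack.common import jsonutils

# Simple types pass straight through; datetimes are stringified via
# timeutils.strtime(); tuples come back as JSON lists.
payload = {'created': datetime(2013, 5, 8, 11, 32, 26),
           'tags': ('alpha', 'beta'),
           'count': 3}
print jsonutils.dumps(payload)

# to_primitive() can also be asked to leave datetime objects alone.
primitive = jsonutils.to_primitive(payload, convert_datetime=False)
assert primitive['created'] == payload['created']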

keystoneclient/openstack/common/setup.py

@@ -1,6 +1,7 @@
 # vim: tabstop=4 shiftwidth=4 softtabstop=4
-# Copyright 2011 OpenStack LLC.
+# Copyright 2011 OpenStack Foundation.
+# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
 # All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -19,7 +20,9 @@
 Utilities with minimum-depends for use in setup.py
 """
-import datetime
+from __future__ import print_function
+import email
 import os
 import re
 import subprocess
@@ -33,20 +36,26 @@ def parse_mailmap(mailmap='.mailmap'):
     if os.path.exists(mailmap):
         with open(mailmap, 'r') as fp:
             for l in fp:
-                l = l.strip()
-                if not l.startswith('#') and ' ' in l:
-                    canonical_email, alias = [x for x in l.split(' ')
-                                              if x.startswith('<')]
-                    mapping[alias] = canonical_email
+                try:
+                    canonical_email, alias = re.match(
+                        r'[^#]*?(<.+>).*(<.+>).*', l).groups()
+                except AttributeError:
+                    continue
+                mapping[alias] = canonical_email
     return mapping
+def _parse_git_mailmap(git_dir, mailmap='.mailmap'):
+    mailmap = os.path.join(os.path.dirname(git_dir), mailmap)
+    return parse_mailmap(mailmap)
 def canonicalize_emails(changelog, mapping):
     """Takes in a string and an email alias mapping and replaces all
     instances of the aliases in the string with their real email.
     """
-    for alias, email in mapping.iteritems():
-        changelog = changelog.replace(alias, email)
+    for alias, email_address in mapping.iteritems():
+        changelog = changelog.replace(alias, email_address)
     return changelog
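
To make the new regex-based .mailmap parsing above concrete, a standalone sketch (the sample line is hypothetical):

import re

# Canonical entry first, commit-time alias second, as in a git .mailmap.
line = "Proper Name <proper@example.com> Commit Name <commit@example.com>\n"
canonical_email, alias = re.match(r'[^#]*?(<.+>).*(<.+>).*', line).groups()
# canonical_email == '<proper@example.com>', alias == '<commit@example.com>'
# A '#' comment line does not match at all, so .groups() raises
# AttributeError and parse_mailmap() simply skips it.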
@@ -106,24 +115,18 @@ def parse_dependency_links(requirements_files=['requirements.txt',
     return dependency_links
-def write_requirements():
-    venv = os.environ.get('VIRTUAL_ENV', None)
-    if venv is not None:
-        with open("requirements.txt", "w") as req_file:
-            output = subprocess.Popen(["pip", "-E", venv, "freeze", "-l"],
-                                      stdout=subprocess.PIPE)
-            requirements = output.communicate()[0].strip()
-            req_file.write(requirements)
-def _run_shell_command(cmd):
+def _run_shell_command(cmd, throw_on_error=False):
     if os.name == 'nt':
         output = subprocess.Popen(["cmd.exe", "/C", cmd],
-                                  stdout=subprocess.PIPE)
+                                  stdout=subprocess.PIPE,
+                                  stderr=subprocess.PIPE)
     else:
         output = subprocess.Popen(["/bin/sh", "-c", cmd],
-                                  stdout=subprocess.PIPE)
+                                  stdout=subprocess.PIPE,
+                                  stderr=subprocess.PIPE)
     out = output.communicate()
+    if output.returncode and throw_on_error:
+        raise Exception("%s returned %d" % cmd, output.returncode)
     if len(out) == 0:
         return None
     if len(out[0].strip()) == 0:
@@ -131,65 +134,26 @@ def _run_shell_command(cmd):
     return out[0].strip()
-def _get_git_next_version_suffix(branch_name):
-    datestamp = datetime.datetime.now().strftime('%Y%m%d')
-    if branch_name == 'milestone-proposed':
-        revno_prefix = "r"
-    else:
-        revno_prefix = ""
-    _run_shell_command("git fetch origin +refs/meta/*:refs/remotes/meta/*")
-    milestone_cmd = "git show meta/openstack/release:%s" % branch_name
-    milestonever = _run_shell_command(milestone_cmd)
-    if milestonever:
-        first_half = "%s~%s" % (milestonever, datestamp)
-    else:
-        first_half = datestamp
-    post_version = _get_git_post_version()
-    # post version should look like:
-    # 0.1.1.4.gcc9e28a
-    # where the bit after the last . is the short sha, and the bit between
-    # the last and second to last is the revno count
-    (revno, sha) = post_version.split(".")[-2:]
-    second_half = "%s%s.%s" % (revno_prefix, revno, sha)
-    return ".".join((first_half, second_half))
-def _get_git_current_tag():
-    return _run_shell_command("git tag --contains HEAD")
-def _get_git_tag_info():
-    return _run_shell_command("git describe --tags")
-def _get_git_post_version():
-    current_tag = _get_git_current_tag()
-    if current_tag is not None:
-        return current_tag
-    else:
-        tag_info = _get_git_tag_info()
-        if tag_info is None:
-            base_version = "0.0"
-            cmd = "git --no-pager log --oneline"
-            out = _run_shell_command(cmd)
-            revno = len(out.split("\n"))
-            sha = _run_shell_command("git describe --always")
-        else:
-            tag_infos = tag_info.split("-")
-            base_version = "-".join(tag_infos[:-2])
-            (revno, sha) = tag_infos[-2:]
-        return "%s.%s.%s" % (base_version, revno, sha)
+def _get_git_directory():
+    parent_dir = os.path.dirname(__file__)
+    while True:
+        git_dir = os.path.join(parent_dir, '.git')
+        if os.path.exists(git_dir):
+            return git_dir
+        parent_dir, child = os.path.split(parent_dir)
+        if not child:  # reached to root dir
+            return None
 def write_git_changelog():
     """Write a changelog based on the git changelog."""
     new_changelog = 'ChangeLog'
+    git_dir = _get_git_directory()
     if not os.getenv('SKIP_WRITE_GIT_CHANGELOG'):
-        if os.path.isdir('.git'):
-            git_log_cmd = 'git log --stat'
+        if git_dir:
+            git_log_cmd = 'git --git-dir=%s log' % git_dir
             changelog = _run_shell_command(git_log_cmd)
-            mailmap = parse_mailmap()
+            mailmap = _parse_git_mailmap(git_dir)
             with open(new_changelog, "w") as changelog_file:
                 changelog_file.write(canonicalize_emails(changelog, mailmap))
         else:
@@ -201,13 +165,23 @@ def generate_authors():
     jenkins_email = 'jenkins@review.(openstack|stackforge).org'
     old_authors = 'AUTHORS.in'
     new_authors = 'AUTHORS'
+    git_dir = _get_git_directory()
     if not os.getenv('SKIP_GENERATE_AUTHORS'):
-        if os.path.isdir('.git'):
+        if git_dir:
             # don't include jenkins email address in AUTHORS file
-            git_log_cmd = ("git log --format='%aN <%aE>' | sort -u | "
+            git_log_cmd = ("git --git-dir=" + git_dir +
+                           " log --format='%aN <%aE>' | sort -u | "
                           "egrep -v '" + jenkins_email + "'")
             changelog = _run_shell_command(git_log_cmd)
-            mailmap = parse_mailmap()
+            signed_cmd = ("git --git-dir=" + git_dir +
+                          " log | grep -i Co-authored-by: | sort -u")
+            signed_entries = _run_shell_command(signed_cmd)
+            if signed_entries:
+                new_entries = "\n".join(
+                    [signed.split(":", 1)[1].strip()
+                     for signed in signed_entries.split("\n") if signed])
+                changelog = "\n".join((changelog, new_entries))
+            mailmap = _parse_git_mailmap(git_dir)
             with open(new_authors, 'w') as new_authors_fh:
                 new_authors_fh.write(canonicalize_emails(changelog, mailmap))
         if os.path.exists(old_authors):
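
The Co-authored-by handling added above reduces to the following (the sample git log output is hypothetical):

# What `git log | grep -i Co-authored-by:` might return:
signed_entries = ("Co-Authored-By: Alice Example <alice@example.com>\n"
                  "Co-authored-by: Bob Example <bob@example.com>")
new_entries = "\n".join(
    [signed.split(":", 1)[1].strip()
     for signed in signed_entries.split("\n") if signed])
# new_entries == 'Alice Example <alice@example.com>\nBob Example <bob@example.com>'

These extra names are appended to the AUTHORS content before email canonicalization.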
@@ -227,26 +201,6 @@ _rst_template = """%(heading)s
 """
-def read_versioninfo(project):
-    """Read the versioninfo file. If it doesn't exist, we're in a github
-    zipball, and there's really no way to know what version we really
-    are, but that should be ok, because the utility of that should be
-    just about nil if this code path is in use in the first place."""
-    versioninfo_path = os.path.join(project, 'versioninfo')
-    if os.path.exists(versioninfo_path):
-        with open(versioninfo_path, 'r') as vinfo:
-            version = vinfo.read().strip()
-    else:
-        version = "0.0.0"
-    return version
-def write_versioninfo(project, version):
-    """Write a simple file containing the version of the package."""
-    with open(os.path.join(project, 'versioninfo'), 'w') as fil:
-        fil.write("%s\n" % version)
 def get_cmdclass():
     """Return dict of commands to run from setup.py."""
@@ -276,8 +230,11 @@ def get_cmdclass():
         from sphinx.setup_command import BuildDoc
         class LocalBuildDoc(BuildDoc):
+            builders = ['html', 'man']
             def generate_autoindex(self):
-                print "**Autodocumenting from %s" % os.path.abspath(os.curdir)
+                print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
                 modules = {}
                 option_dict = self.distribution.get_option_dict('build_sphinx')
                 source_dir = os.path.join(option_dict['source_dir'][1], 'api')
@@ -302,7 +259,7 @@ def get_cmdclass():
                     values = dict(module=module, heading=heading,
                                   underline=underline)
-                    print "Generating %s" % output_filename
+                    print("Generating %s" % output_filename)
                     with open(output_filename, 'w') as output_file:
                         output_file.write(_rst_template % values)
                     autoindex.write(" %s.rst\n" % module)
@@ -311,56 +268,102 @@ def get_cmdclass():
                 if not os.getenv('SPHINX_DEBUG'):
                     self.generate_autoindex()
-                for builder in ['html', 'man']:
+                for builder in self.builders:
                     self.builder = builder
                     self.finalize_options()
                     self.project = self.distribution.get_name()
                     self.version = self.distribution.get_version()
                     self.release = self.distribution.get_version()
                     BuildDoc.run(self)
+        class LocalBuildLatex(LocalBuildDoc):
+            builders = ['latex']
         cmdclass['build_sphinx'] = LocalBuildDoc
+        cmdclass['build_sphinx_latex'] = LocalBuildLatex
     except ImportError:
         pass
     return cmdclass
-def get_git_branchname():
-    for branch in _run_shell_command("git branch --color=never").split("\n"):
-        if branch.startswith('*'):
-            _branch_name = branch.split()[1].strip()
-            if _branch_name == "(no":
-                _branch_name = "no-branch"
-            return _branch_name
-def get_pre_version(projectname, base_version):
-    """Return a version which is leading up to a version that will
-    be released in the future."""
-    if os.path.isdir('.git'):
-        current_tag = _get_git_current_tag()
-        if current_tag is not None:
-            version = current_tag
-        else:
-            branch_name = os.getenv('BRANCHNAME',
-                                    os.getenv('GERRIT_REFNAME',
-                                              get_git_branchname()))
-            version_suffix = _get_git_next_version_suffix(branch_name)
-            version = "%s~%s" % (base_version, version_suffix)
-        write_versioninfo(projectname, version)
-        return version
-    else:
-        version = read_versioninfo(projectname)
-    return version
-def get_post_version(projectname):
+def _get_revno(git_dir):
+    """Return the number of commits since the most recent tag.
+
+    We use git-describe to find this out, but if there are no
+    tags then we fall back to counting commits since the beginning
+    of time.
+    """
+    describe = _run_shell_command(
+        "git --git-dir=%s describe --always" % git_dir)
+    if "-" in describe:
+        return describe.rsplit("-", 2)[-2]
+    # no tags found
+    revlist = _run_shell_command(
+        "git --git-dir=%s rev-list --abbrev-commit HEAD" % git_dir)
+    return len(revlist.splitlines())
+def _get_version_from_git(pre_version):
     """Return a version which is equal to the tag that's on the current
     revision if there is one, or tag plus number of additional revisions
     if the current revision has no tag."""
-    if os.path.isdir('.git'):
-        version = _get_git_post_version()
-        write_versioninfo(projectname, version)
+    git_dir = _get_git_directory()
+    if git_dir:
+        if pre_version:
+            try:
+                return _run_shell_command(
+                    "git --git-dir=" + git_dir + " describe --exact-match",
+                    throw_on_error=True).replace('-', '.')
+            except Exception:
+                sha = _run_shell_command(
+                    "git --git-dir=" + git_dir + " log -n1 --pretty=format:%h")
+                return "%s.a%s.g%s" % (pre_version, _get_revno(git_dir), sha)
+        else:
+            return _run_shell_command(
+                "git --git-dir=" + git_dir + " describe --always").replace(
+                    '-', '.')
+    return None
+def _get_version_from_pkg_info(package_name):
+    """Get the version from PKG-INFO file if we can."""
+    try:
+        pkg_info_file = open('PKG-INFO', 'r')
+    except (IOError, OSError):
+        return None
+    try:
+        pkg_info = email.message_from_file(pkg_info_file)
+    except email.MessageError:
+        return None
+    # Check to make sure we're in our own dir
+    if pkg_info.get('Name', None) != package_name:
+        return None
+    return pkg_info.get('Version', None)
+def get_version(package_name, pre_version=None):
+    """Get the version of the project. First, try getting it from PKG-INFO, if
+    it exists. If it does, that means we're in a distribution tarball or that
+    install has happened. Otherwise, if there is no PKG-INFO file, pull the
+    version from git.
+    We do not support setup.py version sanity in git archive tarballs, nor do
+    we support packagers directly sucking our git repo into theirs. We expect
+    that a source tarball be made from our git repo - or that if someone wants
+    to make a source tarball from a fork of our repo with additional tags in it
+    that they understand and desire the results of doing that.
+    """
+    version = os.environ.get("OSLO_PACKAGE_VERSION", None)
+    if version:
         return version
-    return read_versioninfo(projectname)
+    version = _get_version_from_pkg_info(package_name)
+    if version:
+        return version
+    version = _get_version_from_git(pre_version)
+    if version:
+        return version
+    raise Exception("Versioning for this project requires either an sdist"
+                    " tarball, or access to an upstream git repository.")

keystoneclient/openstack/common/timeutils.py

@@ -1,6 +1,6 @@
 # vim: tabstop=4 shiftwidth=4 softtabstop=4
-# Copyright 2011 OpenStack LLC.
+# Copyright 2011 OpenStack Foundation.
 # All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -25,18 +25,22 @@ import datetime
 import iso8601
-TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
-PERFECT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
+# ISO 8601 extended time format with microseconds
+_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
+_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
+PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND
-def isotime(at=None):
+def isotime(at=None, subsecond=False):
     """Stringify time in ISO 8601 format"""
     if not at:
         at = utcnow()
-    str = at.strftime(TIME_FORMAT)
+    st = at.strftime(_ISO8601_TIME_FORMAT
+                     if not subsecond
+                     else _ISO8601_TIME_FORMAT_SUBSECOND)
     tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
-    str += ('Z' if tz == 'UTC' else tz)
-    return str
+    st += ('Z' if tz == 'UTC' else tz)
+    return st
 def parse_isotime(timestr):
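
A brief sketch of the new subsecond switch in isotime() (fixed timestamp for illustration; naive datetimes are treated as UTC):

import datetime

from keystoneclient.openstack.common import timeutils

ts = datetime.datetime(2013, 5, 8, 18, 32, 26, 123456)
print timeutils.isotime(ts)                  # 2013-05-08T18:32:26Z
print timeutils.isotime(ts, subsecond=True)  # 2013-05-08T18:32:26.123456Z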

setup.py

@@ -14,7 +14,7 @@ tests_require = setup.parse_requirements(['tools/test-requires'])
 setuptools.setup(
     name="python-keystoneclient",
-    version=setup.get_post_version('keystoneclient'),
+    version=setup.get_version('python-keystoneclient'),
     description="Client library for OpenStack Identity API (Keystone)",
     long_description=read('README.rst'),
     url='https://github.com/openstack/python-keystoneclient',
@@ -42,5 +42,4 @@ setuptools.setup(
     entry_points={
         'console_scripts': ['keystone = keystoneclient.shell:main']
     },
-    data_files=[('keystoneclient', ['keystoneclient/versioninfo'])],
 )
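
With the versioninfo data file gone, setup.py now resolves its version through get_version(). A sketch of the lookup order (illustrative value; assumes the synced module at keystoneclient/openstack/common/setup.py):

import os

from keystoneclient.openstack.common import setup as common_setup

# 1. An explicit OSLO_PACKAGE_VERSION override wins.
os.environ['OSLO_PACKAGE_VERSION'] = '0.2.3'
print common_setup.get_version('python-keystoneclient')  # prints: 0.2.3

# 2. Otherwise the version is read from PKG-INFO (sdist or installed tree),
# 3. then derived from `git describe` via _get_version_from_git(),
# 4. and finally an exception is raised if no source is available.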

tools/install_venv_common.py

@@ -1,6 +1,6 @@
 # vim: tabstop=4 shiftwidth=4 softtabstop=4
-# Copyright 2013 OpenStack, LLC
+# Copyright 2013 OpenStack Foundation
 # Copyright 2013 IBM Corp.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -18,23 +18,20 @@
 """Provides methods needed by installation script for OpenStack development
 virtual environments.
+Since this script is used to bootstrap a virtualenv from the system's Python
+environment, it should be kept strictly compatible with Python 2.6.
 Synced in from openstack-common
 """
+from __future__ import print_function
+import optparse
 import os
 import subprocess
 import sys
-possible_topdir = os.getcwd()
-if os.path.exists(os.path.join(possible_topdir, "keystoneclient",
-                               "__init__.py")):
-    sys.path.insert(0, possible_topdir)
-from oslo.config import cfg
 class InstallVenv(object):
     def __init__(self, root, venv, pip_requires, test_requires, py_version,
@@ -47,7 +44,7 @@ class InstallVenv(object):
         self.project = project
     def die(self, message, *args):
-        print >> sys.stderr, message % args
+        print(message % args, file=sys.stderr)
         sys.exit(1)
     def check_python_version(self):
@@ -58,7 +55,7 @@ class InstallVenv(object):
                      check_exit_code=True):
         """Runs a command in an out-of-process shell.
-        Returns the output of that command. Working directory is ROOT.
+        Returns the output of that command. Working directory is self.root.
         """
         if redirect_output:
             stdout = subprocess.PIPE
@@ -94,20 +91,20 @@ class InstallVenv(object):
         virtual environment.
         """
         if not os.path.isdir(self.venv):
-            print 'Creating venv...',
+            print('Creating venv...', end=' ')
             if no_site_packages:
                 self.run_command(['virtualenv', '-q', '--no-site-packages',
                                  self.venv])
             else:
                 self.run_command(['virtualenv', '-q', self.venv])
-            print 'done.'
-            print 'Installing pip in virtualenv...',
+            print('done.')
+            print('Installing pip in venv...', end=' ')
             if not self.run_command(['tools/with_venv.sh', 'easy_install',
                                       'pip>1.0']).strip():
                 self.die("Failed to install pip.")
-            print 'done.'
+            print('done.')
         else:
-            print "venv already exists..."
+            print("venv already exists...")
             pass
     def pip_install(self, *args):
@@ -116,7 +113,7 @@ class InstallVenv(object):
                          redirect_output=False)
     def install_dependencies(self):
-        print 'Installing dependencies with pip (this can take a while)...'
+        print('Installing dependencies with pip (this can take a while)...')
         # First things first, make sure our venv has the latest pip and
         # distribute.
@@ -139,17 +136,12 @@ class InstallVenv(object):
     def parse_args(self, argv):
         """Parses command-line arguments."""
-        cli_opts = [
-            cfg.BoolOpt('no-site-packages',
-                        default=False,
-                        short='n',
-                        help="Do not inherit packages from global Python"
-                             "install"),
-        ]
-        CLI = cfg.ConfigOpts()
-        CLI.register_cli_opts(cli_opts)
-        CLI(argv[1:])
-        return CLI
+        parser = optparse.OptionParser()
+        parser.add_option('-n', '--no-site-packages',
+                          action='store_true',
+                          help="Do not inherit packages from global Python "
+                               "install")
+        return parser.parse_args(argv[1:])[0]
 class Distro(InstallVenv):
@@ -163,12 +155,12 @@ class Distro(InstallVenv):
             return
         if self.check_cmd('easy_install'):
-            print 'Installing virtualenv via easy_install...',
+            print('Installing virtualenv via easy_install...', end=' ')
             if self.run_command(['easy_install', 'virtualenv']):
-                print 'Succeeded'
+                print('Succeeded')
                 return
             else:
-                print 'Failed'
+                print('Failed')
         self.die('ERROR: virtualenv not found.\n\n%s development'
                  ' requires virtualenv, please install it using your'
@@ -193,19 +185,16 @@ class Fedora(Distro):
         return self.run_command_with_code(['rpm', '-q', pkg],
                                            check_exit_code=False)[1] == 0
-    def yum_install(self, pkg, **kwargs):
-        print "Attempting to install '%s' via yum" % pkg
-        self.run_command(['sudo', 'yum', 'install', '-y', pkg], **kwargs)
     def apply_patch(self, originalfile, patchfile):
-        self.run_command(['patch', originalfile, patchfile])
+        self.run_command(['patch', '-N', originalfile, patchfile],
+                         check_exit_code=False)
     def install_virtualenv(self):
         if self.check_cmd('virtualenv'):
             return
         if not self.check_pkg('python-virtualenv'):
-            self.yum_install('python-virtualenv', check_exit_code=False)
+            self.die("Please install 'python-virtualenv'.")
         super(Fedora, self).install_virtualenv()
@@ -223,7 +212,7 @@ class Fedora(Distro):
         # Install "patch" program if it's not there
         if not self.check_pkg('patch'):
-            self.yum_install('patch')
+            self.die("Please install 'patch'.")
         # Apply the eventlet patch
         self.apply_patch(os.path.join(self.venv, 'lib', self.py_version,
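
The effect of the changes above is that the venv bootstrap no longer imports oslo.config at all; option parsing is plain stdlib optparse. A standalone sketch of the equivalent parsing (the argv value is illustrative):

import optparse

parser = optparse.OptionParser()
parser.add_option('-n', '--no-site-packages', action='store_true',
                  help="Do not inherit packages from global Python install")

# parse_args() returns (options, args); InstallVenv.parse_args() keeps options.
options = parser.parse_args(['-n'])[0]
print options.no_site_packages  # True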

tools/pip-requires

@@ -3,4 +3,5 @@ iso8601>=0.1.4
 prettytable>=0.6,<0.8
 requests>=0.8.8
 simplejson
+six
 oslo.config>=1.1.0