Just use d2to1 via the hooks mechanism.

It turns out we can keep only our own code and use d2to1 directly.

Change-Id: I4700ad51bf4377797ede55d3cf5ec09f424874ed
parent 4a674682f6
commit caf4be4778
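For context, this is the shape the consuming project ends up with after this change: setup.py only declares a build-time dependency on d2to1 and turns it on, while everything project-specific lives in setup.cfg and in the new hooks module added below. A minimal sketch, assuming nothing beyond what the diff itself shows (the [global] section mirrors the setup.cfg hunk at the end of this commit):

# setup.py -- minimal sketch of the pattern this change moves to.
# d2to1 reads setup.cfg, runs any configured setup-hooks, and feeds the
# resulting arguments to setuptools; nothing project-specific lives here.
import setuptools

setuptools.setup(
    setup_requires=['d2to1'],
    d2to1=True)

# setup.cfg then carries the metadata plus, per this change:
#
#   [global]
#   setup-hooks =
#       oslo.packaging.hooks.setup_hook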
@@ -1,144 +0,0 @@
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2013 Association of Universities for Research in Astronomy
# (AURA)

# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
#    copyright notice, this list of conditions and the following
#    disclaimer in the documentation and/or other materials provided
#    with the distribution.
#
# 3. The name of AURA and its representatives may not be used to
#    endorse or promote products derived from this software without
#    specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.

from distutils.core import Distribution as _Distribution
from distutils.errors import DistutilsFileError, DistutilsSetupError
import os
import sys
import warnings

from distutils import log
import pkg_resources
from setuptools.dist import _get_unpatched
from .extern import six

from oslo.packaging import util

_Distribution = _get_unpatched(_Distribution)
log.set_verbosity(log.INFO)


def setup(dist, attr, value):
    """Implements the actual oslo.packaging setup() keyword.

    When used, this should be the only keyword in your setup() aside from
    `setup_requires`.

    This works by reading the setup.cfg file, parsing out the supported
    metadata and command options, and using them to rebuild the
    `DistributionMetadata` object and set the newly added command options.

    The reason for doing things this way is that a custom `Distribution` class
    will not play nicely with setup_requires; however, this implementation may
    not work well with distributions that do use a `Distribution` subclass.
    """

    log.info("[oslo.packaging] Processing setup.cfg")
    if not value:
        return
    path = os.path.abspath('setup.cfg')
    if not os.path.exists(path):
        raise DistutilsFileError(
            'The setup.cfg file %s does not exist.' % path)

    # Converts the setup.cfg file to setup() arguments
    try:
        attrs = util.cfg_to_args(path)
    except:
        e = sys.exc_info()[1]
        raise DistutilsSetupError(
            'Error parsing %s: %s: %s' % (path, e.__class__.__name__,
                                          six.u(e)))

    # Repeat some of the Distribution initialization code with the newly
    # provided attrs
    if attrs:

        # Handle additional setup processing
        if 'setup_requires' in attrs:
            chainload_setups(dist, attrs['setup_requires'])

        for ep in pkg_resources.iter_entry_points(
                'oslo.packaging.attr_filters'):
            filter_method = ep.load()
            attrs = filter_method(attrs)

        # Skips 'options' and 'licence' support which are rarely used; may add
        # back in later if demanded
        for key, val in six.iteritems(attrs):
            if hasattr(dist.metadata, 'set_' + key):
                getattr(dist.metadata, 'set_' + key)(val)
            elif hasattr(dist.metadata, key):
                setattr(dist.metadata, key, val)
            elif hasattr(dist, key):
                setattr(dist, key, val)
            else:
                msg = 'Unknown distribution option: %s' % repr(key)
                warnings.warn(msg)

    # Re-finalize the underlying Distribution
    _Distribution.finalize_options(dist)

    # This bit comes out of distribute/setuptools
    if isinstance(dist.metadata.version, six.integer_types + (float,)):
        # Some people apparently take "version number" too literally :)
        dist.metadata.version = str(dist.metadata.version)


def chainload_setups(dist, requires_list):
    try:
        import pip.command.install
    except ImportError:
        from setuptools.command.easy_install import easy_install
        cmd = easy_install(dist, args=["x"], install_dir=os.curdir,
                           exclude_scripts=True, always_copy=False,
                           build_directory=None, editable=False,
                           upgrade=False, multi_version=True, no_report=True)
        cmd.ensure_finalized()
        cmd.easy_install("req")
        import pip.command.install

    pip_install = pip.command.install.InstallCommand()
    pip_install.run({}, requires_list)
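The module removed above was reached through a distutils setup keyword registered as an entry point; the corresponding setup.cfg lines are removed further down in this diff. A hedged sketch of how that wiring worked follows; the consumer-side setup() call is an assumed example, not something shown in this commit.

# setuptools discovers 'distutils.setup_keywords' entry points and calls the
# referenced function with (dist, attr, value) whenever a consumer passes
# that keyword.  The removed entry point (see the setup.cfg hunk below) was:
#
#   [entry_points]
#   distutils.setup_keywords =
#       oslo_packaging = oslo.packaging.core:setup
#
# so a consumer's setup.py would have looked roughly like this (assumed
# illustration; the distribution name is hypothetical):
import setuptools

setuptools.setup(
    setup_requires=['oslo.packaging'],
    oslo_packaging=True)  # triggers setup(dist, 'oslo_packaging', True) above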
oslo/packaging/extern/__init__.py (vendored, 0 lines)
oslo/packaging/extern/six.py (vendored, 391 lines)
@@ -1,391 +0,0 @@
|
||||
# Copyright (c) 2010-2011 Benjamin Peterson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
import operator
|
||||
import sys
|
||||
import types
|
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||
__version__ = "1.2.0"
|
||||
|
||||
|
||||
# True if we are running on Python 3.
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
integer_types = int,
|
||||
class_types = type,
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
|
||||
MAXSIZE = sys.maxsize
|
||||
else:
|
||||
string_types = basestring,
|
||||
integer_types = (int, long)
|
||||
class_types = (type, types.ClassType)
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
if sys.platform == "java":
|
||||
# Jython always uses 32 bits.
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||
class X(object):
|
||||
def __len__(self):
|
||||
return 1 << 31
|
||||
try:
|
||||
len(X())
|
||||
except OverflowError:
|
||||
# 32-bit
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# 64-bit
|
||||
MAXSIZE = int((1 << 63) - 1)
|
||||
del X
|
||||
|
||||
|
||||
def _add_doc(func, doc):
|
||||
"""Add documentation to a function."""
|
||||
func.__doc__ = doc
|
||||
|
||||
|
||||
def _import_module(name):
|
||||
"""Import module, returning the module after the last dot."""
|
||||
__import__(name)
|
||||
return sys.modules[name]
|
||||
|
||||
|
||||
class _LazyDescr(object):
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, tp):
|
||||
result = self._resolve()
|
||||
setattr(obj, self.name, result)
|
||||
# This is a bit ugly, but it avoids running this again.
|
||||
delattr(tp, self.name)
|
||||
return result
|
||||
|
||||
|
||||
class MovedModule(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old, new=None):
|
||||
super(MovedModule, self).__init__(name)
|
||||
if PY3:
|
||||
if new is None:
|
||||
new = name
|
||||
self.mod = new
|
||||
else:
|
||||
self.mod = old
|
||||
|
||||
def _resolve(self):
|
||||
return _import_module(self.mod)
|
||||
|
||||
|
||||
class MovedAttribute(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||
super(MovedAttribute, self).__init__(name)
|
||||
if PY3:
|
||||
if new_mod is None:
|
||||
new_mod = name
|
||||
self.mod = new_mod
|
||||
if new_attr is None:
|
||||
if old_attr is None:
|
||||
new_attr = name
|
||||
else:
|
||||
new_attr = old_attr
|
||||
self.attr = new_attr
|
||||
else:
|
||||
self.mod = old_mod
|
||||
if old_attr is None:
|
||||
old_attr = name
|
||||
self.attr = old_attr
|
||||
|
||||
def _resolve(self):
|
||||
module = _import_module(self.mod)
|
||||
return getattr(module, self.attr)
|
||||
|
||||
|
||||
class _MovedItems(types.ModuleType):
|
||||
"""Lazy loading of moved objects"""
|
||||
|
||||
|
||||
_moved_attributes = [
|
||||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
|
||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||
MovedAttribute("StringIO", "StringIO", "io"),
|
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||
|
||||
MovedModule("builtins", "__builtin__"),
|
||||
MovedModule("configparser", "ConfigParser"),
|
||||
MovedModule("copyreg", "copy_reg"),
|
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||
MovedModule("http_client", "httplib", "http.client"),
|
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||
MovedModule("cPickle", "cPickle", "pickle"),
|
||||
MovedModule("queue", "Queue"),
|
||||
MovedModule("reprlib", "repr"),
|
||||
MovedModule("socketserver", "SocketServer"),
|
||||
MovedModule("tkinter", "Tkinter"),
|
||||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_scrolledtext", "ScrolledText",
|
||||
"tkinter.scrolledtext"),
|
||||
MovedModule("tkinter_simpledialog", "SimpleDialog",
|
||||
"tkinter.simpledialog"),
|
||||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||
"tkinter.colorchooser"),
|
||||
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||
"tkinter.commondialog"),
|
||||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||
"tkinter.simpledialog"),
|
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||
MovedModule("winreg", "_winreg"),
|
||||
]
|
||||
for attr in _moved_attributes:
|
||||
setattr(_MovedItems, attr.name, attr)
|
||||
del attr
|
||||
|
||||
moves = sys.modules["six.moves"] = _MovedItems("moves")
|
||||
|
||||
|
||||
def add_move(move):
|
||||
"""Add an item to six.moves."""
|
||||
setattr(_MovedItems, move.name, move)
|
||||
|
||||
|
||||
def remove_move(name):
|
||||
"""Remove item from six.moves."""
|
||||
try:
|
||||
delattr(_MovedItems, name)
|
||||
except AttributeError:
|
||||
try:
|
||||
del moves.__dict__[name]
|
||||
except KeyError:
|
||||
raise AttributeError("no such move, %r" % (name,))
|
||||
|
||||
|
||||
if PY3:
|
||||
_meth_func = "__func__"
|
||||
_meth_self = "__self__"
|
||||
|
||||
_func_code = "__code__"
|
||||
_func_defaults = "__defaults__"
|
||||
|
||||
_iterkeys = "keys"
|
||||
_itervalues = "values"
|
||||
_iteritems = "items"
|
||||
else:
|
||||
_meth_func = "im_func"
|
||||
_meth_self = "im_self"
|
||||
|
||||
_func_code = "func_code"
|
||||
_func_defaults = "func_defaults"
|
||||
|
||||
_iterkeys = "iterkeys"
|
||||
_itervalues = "itervalues"
|
||||
_iteritems = "iteritems"
|
||||
|
||||
|
||||
try:
|
||||
advance_iterator = next
|
||||
except NameError:
|
||||
def advance_iterator(it):
|
||||
return it.next()
|
||||
next = advance_iterator
|
||||
|
||||
|
||||
if PY3:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound
|
||||
|
||||
Iterator = object
|
||||
|
||||
def callable(obj):
|
||||
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
||||
else:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound.im_func
|
||||
|
||||
class Iterator(object):
|
||||
|
||||
def next(self):
|
||||
return type(self).__next__(self)
|
||||
|
||||
callable = callable
|
||||
_add_doc(get_unbound_function,
|
||||
"""Get the function out of a possibly unbound function""")
|
||||
|
||||
|
||||
get_method_function = operator.attrgetter(_meth_func)
|
||||
get_method_self = operator.attrgetter(_meth_self)
|
||||
get_function_code = operator.attrgetter(_func_code)
|
||||
get_function_defaults = operator.attrgetter(_func_defaults)
|
||||
|
||||
|
||||
def iterkeys(d):
|
||||
"""Return an iterator over the keys of a dictionary."""
|
||||
return iter(getattr(d, _iterkeys)())
|
||||
|
||||
|
||||
def itervalues(d):
|
||||
"""Return an iterator over the values of a dictionary."""
|
||||
return iter(getattr(d, _itervalues)())
|
||||
|
||||
|
||||
def iteritems(d):
|
||||
"""Return an iterator over the (key, value) pairs of a dictionary."""
|
||||
return iter(getattr(d, _iteritems)())
|
||||
|
||||
|
||||
if PY3:
|
||||
|
||||
def b(s):
|
||||
return s.encode("latin-1")
|
||||
|
||||
def u(s):
|
||||
return s
|
||||
if sys.version_info[1] <= 1:
|
||||
def int2byte(i):
|
||||
return bytes((i,))
|
||||
else:
|
||||
# This is about 2x faster than the implementation above on 3.2+
|
||||
int2byte = operator.methodcaller("to_bytes", 1, "big")
|
||||
import io
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
else:
|
||||
|
||||
def b(s):
|
||||
return s
|
||||
|
||||
def u(s):
|
||||
return unicode(s, "unicode_escape")
|
||||
int2byte = chr
|
||||
import StringIO
|
||||
StringIO = BytesIO = StringIO.StringIO
|
||||
_add_doc(b, """Byte literal""")
|
||||
_add_doc(u, """Text literal""")
|
||||
|
||||
|
||||
if PY3:
|
||||
import builtins
|
||||
exec_ = getattr(builtins, "exec")
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
|
||||
print_ = getattr(builtins, "print")
|
||||
del builtins
|
||||
|
||||
else:
|
||||
|
||||
def exec_(code, globs=None, locs=None):
|
||||
"""Execute code in a namespace."""
|
||||
if globs is None:
|
||||
frame = sys._getframe(1)
|
||||
globs = frame.f_globals
|
||||
if locs is None:
|
||||
locs = frame.f_locals
|
||||
del frame
|
||||
elif locs is None:
|
||||
locs = globs
|
||||
exec("""exec code in globs, locs""")
|
||||
|
||||
exec_("""def reraise(tp, value, tb=None):
|
||||
raise tp, value, tb
|
||||
""")
|
||||
|
||||
def print_(*args, **kwargs):
|
||||
"""The new-style print function."""
|
||||
fp = kwargs.pop("file", sys.stdout)
|
||||
if fp is None:
|
||||
return
|
||||
|
||||
def write(data):
|
||||
if not isinstance(data, basestring):
|
||||
data = str(data)
|
||||
fp.write(data)
|
||||
want_unicode = False
|
||||
sep = kwargs.pop("sep", None)
|
||||
if sep is not None:
|
||||
if isinstance(sep, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(sep, str):
|
||||
raise TypeError("sep must be None or a string")
|
||||
end = kwargs.pop("end", None)
|
||||
if end is not None:
|
||||
if isinstance(end, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(end, str):
|
||||
raise TypeError("end must be None or a string")
|
||||
if kwargs:
|
||||
raise TypeError("invalid keyword arguments to print()")
|
||||
if not want_unicode:
|
||||
for arg in args:
|
||||
if isinstance(arg, unicode):
|
||||
want_unicode = True
|
||||
break
|
||||
if want_unicode:
|
||||
newline = unicode("\n")
|
||||
space = unicode(" ")
|
||||
else:
|
||||
newline = "\n"
|
||||
space = " "
|
||||
if sep is None:
|
||||
sep = space
|
||||
if end is None:
|
||||
end = newline
|
||||
for i, arg in enumerate(args):
|
||||
if i:
|
||||
write(sep)
|
||||
write(arg)
|
||||
write(end)
|
||||
|
||||
_add_doc(reraise, """Reraise an exception.""")
|
||||
|
||||
|
||||
def with_metaclass(meta, base=object):
|
||||
"""Create a base class with a metaclass."""
|
||||
return meta("NewBase", (base,), {})
|
oslo/packaging/hooks.py (new file, 57 lines)
@@ -0,0 +1,57 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import setuptools

from oslo.packaging import packaging


def smart_find_packages(package_list):
    """Run find_packages the way we intend."""
    packages = []
    for pkg in package_list.strip().split("\n"):
        pkg_path = pkg.replace('.', os.path.sep)
        packages.append(pkg)
        packages.extend(['%s.%s' % (pkg, f)
                         for f in setuptools.find_packages(pkg_path)])
    return "\n".join(set(packages))


def setup_hook(config):
    """Filter config parsed from a setup.cfg to inject our defaults."""
    metadata = config['metadata']
    metadata['version'] = packaging.get_version(metadata['name'],
                                                metadata.get('version', None))
    metadata['requires_dist'] = "\n".join(packaging.parse_requirements())
    config['metadata'] = metadata

    config['global'] = config.get('global', dict())
    config['global']['commands'] = config['global'].get('commands', "") + """
oslo.packaging.packaging.LocalSDist
oslo.packaging.packaging.LocalBuildDoc
oslo.packaging.packaging.LocalBuildLatex
"""

    # TODO: Need to get these two included upstream
    #attrs['dependency_links'] = parse_dependency_links()
    #attrs['include_package_data'] = True

    files = config.get('files', dict())
    files['packages'] = smart_find_packages(
        files.get('packages', metadata['name']))
    config['files'] = files
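To make the hook's contract concrete, the sketch below shows roughly what d2to1 hands to setup_hook and what the hook fills in. It is illustrative only: the dict shape follows the code above, but the actual values come from packaging.get_version() and packaging.parse_requirements() at build time, and the project name is just an example.

# Illustrative sketch of the config dict setup_hook() receives and mutates.
config = {
    'metadata': {'name': 'oslo.packaging'},   # example project name
    'files': {},
}

# After setup_hook(config), roughly:
#   config['metadata']['version']       - filled from packaging.get_version()
#   config['metadata']['requires_dist'] - newline-joined parse_requirements()
#   config['global']['commands']        - the three Local* command classes above
#   config['files']['packages']         - smart_find_packages() expansion of the
#                                         configured (or defaulted) package list,
#                                         returned as a newline-separated string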
@ -27,8 +27,8 @@ import subprocess
|
||||
import sys
|
||||
|
||||
from distutils import log
|
||||
import setuptools
|
||||
from setuptools.command import sdist
|
||||
from sphinx.setup_command import BuildDoc
|
||||
|
||||
log.set_verbosity(log.INFO)
|
||||
|
||||
@ -197,92 +197,78 @@ _rst_template = """%(heading)s
|
||||
"""
|
||||
|
||||
|
||||
def get_cmdclass():
|
||||
"""Return dict of commands to run from setup.py."""
|
||||
def _find_modules(arg, dirname, files):
|
||||
for filename in files:
|
||||
if filename.endswith('.py') and filename != '__init__.py':
|
||||
arg["%s.%s" % (dirname.replace('/', '.'),
|
||||
filename[:-3])] = True
|
||||
|
||||
cmdclass = dict()
|
||||
|
||||
def _find_modules(arg, dirname, files):
|
||||
for filename in files:
|
||||
if filename.endswith('.py') and filename != '__init__.py':
|
||||
arg["%s.%s" % (dirname.replace('/', '.'),
|
||||
filename[:-3])] = True
|
||||
class LocalSDist(sdist.sdist):
|
||||
"""Builds the ChangeLog and Authors files from VC first."""
|
||||
|
||||
class LocalSDist(sdist.sdist):
|
||||
"""Builds the ChangeLog and Authors files from VC first."""
|
||||
command_name = 'sdist'
|
||||
|
||||
def run(self):
|
||||
write_git_changelog()
|
||||
generate_authors()
|
||||
# sdist.sdist is an old style class, can't use super()
|
||||
sdist.sdist.run(self)
|
||||
def run(self):
|
||||
write_git_changelog()
|
||||
generate_authors()
|
||||
# sdist.sdist is an old style class, can't use super()
|
||||
sdist.sdist.run(self)
|
||||
|
||||
cmdclass['sdist'] = LocalSDist
|
||||
|
||||
# If Sphinx is installed on the box running setup.py,
|
||||
# enable setup.py to build the documentation, otherwise,
|
||||
# just ignore it
|
||||
try:
|
||||
from sphinx.setup_command import BuildDoc
|
||||
class LocalBuildDoc(BuildDoc):
|
||||
|
||||
class LocalBuildDoc(BuildDoc):
|
||||
command_name = 'build_sphinx'
|
||||
builders = ['html', 'man']
|
||||
|
||||
builders = ['html', 'man']
|
||||
|
||||
def generate_autoindex(self):
|
||||
log.info("[oslo.packaging] Autodocumenting from %s"
|
||||
% os.path.abspath(os.curdir))
|
||||
modules = {}
|
||||
option_dict = self.distribution.get_option_dict('build_sphinx')
|
||||
source_dir = os.path.join(option_dict['source_dir'][1], 'api')
|
||||
if not os.path.exists(source_dir):
|
||||
os.makedirs(source_dir)
|
||||
for pkg in self.distribution.packages:
|
||||
if '.' not in pkg:
|
||||
os.path.walk(pkg, _find_modules, modules)
|
||||
module_list = modules.keys()
|
||||
module_list.sort()
|
||||
autoindex_filename = os.path.join(source_dir, 'autoindex.rst')
|
||||
with open(autoindex_filename, 'w') as autoindex:
|
||||
autoindex.write(""".. toctree::
|
||||
:maxdepth: 1
|
||||
def generate_autoindex(self):
|
||||
log.info("[oslo.packaging] Autodocumenting from %s"
|
||||
% os.path.abspath(os.curdir))
|
||||
modules = {}
|
||||
option_dict = self.distribution.get_option_dict('build_sphinx')
|
||||
source_dir = os.path.join(option_dict['source_dir'][1], 'api')
|
||||
if not os.path.exists(source_dir):
|
||||
os.makedirs(source_dir)
|
||||
for pkg in self.distribution.packages:
|
||||
if '.' not in pkg:
|
||||
os.path.walk(pkg, _find_modules, modules)
|
||||
module_list = modules.keys()
|
||||
module_list.sort()
|
||||
autoindex_filename = os.path.join(source_dir, 'autoindex.rst')
|
||||
with open(autoindex_filename, 'w') as autoindex:
|
||||
autoindex.write(""".. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
""")
|
||||
for module in module_list:
|
||||
output_filename = os.path.join(source_dir,
|
||||
"%s.rst" % module)
|
||||
heading = "The :mod:`%s` Module" % module
|
||||
underline = "=" * len(heading)
|
||||
values = dict(module=module, heading=heading,
|
||||
underline=underline)
|
||||
for module in module_list:
|
||||
output_filename = os.path.join(source_dir,
|
||||
"%s.rst" % module)
|
||||
heading = "The :mod:`%s` Module" % module
|
||||
underline = "=" * len(heading)
|
||||
values = dict(module=module, heading=heading,
|
||||
underline=underline)
|
||||
|
||||
log.info("[oslo.packaging] Generating %s"
|
||||
% output_filename)
|
||||
with open(output_filename, 'w') as output_file:
|
||||
output_file.write(_rst_template % values)
|
||||
autoindex.write(" %s.rst\n" % module)
|
||||
log.info("[oslo.packaging] Generating %s"
|
||||
% output_filename)
|
||||
with open(output_filename, 'w') as output_file:
|
||||
output_file.write(_rst_template % values)
|
||||
autoindex.write(" %s.rst\n" % module)
|
||||
|
||||
def run(self):
|
||||
if not os.getenv('SPHINX_DEBUG'):
|
||||
self.generate_autoindex()
|
||||
def run(self):
|
||||
if not os.getenv('SPHINX_DEBUG'):
|
||||
self.generate_autoindex()
|
||||
|
||||
for builder in self.builders:
|
||||
self.builder = builder
|
||||
self.finalize_options()
|
||||
self.project = self.distribution.get_name()
|
||||
self.version = self.distribution.get_version()
|
||||
self.release = self.distribution.get_version()
|
||||
BuildDoc.run(self)
|
||||
for builder in self.builders:
|
||||
self.builder = builder
|
||||
self.finalize_options()
|
||||
self.project = self.distribution.get_name()
|
||||
self.version = self.distribution.get_version()
|
||||
self.release = self.distribution.get_version()
|
||||
BuildDoc.run(self)
|
||||
|
||||
class LocalBuildLatex(LocalBuildDoc):
|
||||
builders = ['latex']
|
||||
|
||||
cmdclass['build_sphinx'] = LocalBuildDoc
|
||||
cmdclass['build_sphinx_latex'] = LocalBuildLatex
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
return cmdclass
|
||||
class LocalBuildLatex(LocalBuildDoc):
|
||||
builders = ['latex']
|
||||
command_name = 'build_sphinx_latex'
|
||||
|
||||
|
||||
def _get_revno(git_dir):
|
||||
@ -365,25 +351,3 @@ def get_version(package_name, pre_version=None):
|
||||
return version
|
||||
raise Exception("Versioning for this project requires either an sdist"
|
||||
" tarball, or access to an upstream git repository.")
|
||||
|
||||
|
||||
def smart_find_packages(package_list):
|
||||
"""Run find_packages the way we intend."""
|
||||
packages = []
|
||||
for pkg in package_list:
|
||||
pkg_path = pkg.replace('.', os.path.sep)
|
||||
packages.append(pkg)
|
||||
packages.extend(['%s.%s' % (pkg, f)
|
||||
for f in setuptools.find_packages(pkg_path)])
|
||||
return list(set(packages))
|
||||
|
||||
|
||||
def attr_filter(attrs):
|
||||
"""Filter attrs parsed from a setup.cfg to inject our defaults."""
|
||||
attrs['version'] = get_version(attrs['name'], attrs.get('version', None))
|
||||
attrs['cmdclass'] = get_cmdclass()
|
||||
attrs['install_requires'] = parse_requirements()
|
||||
attrs['dependency_links'] = parse_dependency_links()
|
||||
attrs['include_package_data'] = True
|
||||
attrs['packages'] = smart_find_packages(attrs['packages'])
|
||||
return attrs
|
||||
|
@ -1,632 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Copyright (C) 2013 Association of Universities for Research in Astronomy
|
||||
# (AURA)
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# 3. The name of AURA and its representatives may not be used to
|
||||
# endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
|
||||
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
||||
# DAMAGE.
|
||||
|
||||
"""The code in this module is mostly copy/pasted out of the distutils2 source
|
||||
code, as recommended by Tarek Ziade. As such, it may be subject to some change
|
||||
as distutils2 development continues, and will have to be kept up to date.
|
||||
|
||||
I didn't want to use it directly from distutils2 itself, since I do not want it
|
||||
to be an installation dependency for our packages yet--it is still too unstable
|
||||
(the latest version on PyPI doesn't even install).
|
||||
"""
|
||||
|
||||
# These first two imports are not used, but are needed to get around an
|
||||
# irritating Python bug that can crop up when using ./setup.py test.
|
||||
# See: http://www.eby-sarna.com/pipermail/peak/2010-May/003355.html
|
||||
try:
|
||||
import multiprocessing
|
||||
_ = multiprocessing
|
||||
except ImportError:
|
||||
pass
|
||||
import logging
|
||||
_ = logging
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
import distutils.ccompiler
|
||||
|
||||
from distutils import log
|
||||
from distutils.errors import (DistutilsOptionError, DistutilsModuleError,
|
||||
DistutilsFileError)
|
||||
from setuptools.command.egg_info import manifest_maker
|
||||
from setuptools.dist import Distribution
|
||||
from setuptools.extension import Extension
|
||||
|
||||
try:
|
||||
from ConfigParser import RawConfigParser
|
||||
except ImportError:
|
||||
from configparser import RawConfigParser
|
||||
|
||||
from oslo.packaging import packaging
|
||||
|
||||
# A simplified RE for this; just checks that the line ends with version
|
||||
# predicates in ()
|
||||
_VERSION_SPEC_RE = re.compile(r'\s*(.*?)\s*\((.*)\)\s*$')
|
||||
|
||||
|
||||
# Mappings from setup() keyword arguments to setup.cfg options;
|
||||
# The values are (section, option) tuples, or simply (section,) tuples if
|
||||
# the option has the same name as the setup() argument
|
||||
D1_D2_SETUP_ARGS = {
|
||||
"name": ("metadata",),
|
||||
"version": ("metadata",),
|
||||
"author": ("metadata",),
|
||||
"author_email": ("metadata",),
|
||||
"maintainer": ("metadata",),
|
||||
"maintainer_email": ("metadata",),
|
||||
"url": ("metadata", "home_page"),
|
||||
"description": ("metadata", "summary"),
|
||||
"keywords": ("metadata",),
|
||||
"long_description": ("metadata", "description"),
|
||||
"download-url": ("metadata",),
|
||||
"classifiers": ("metadata", "classifier"),
|
||||
"platforms": ("metadata", "platform"), # **
|
||||
"license": ("metadata",),
|
||||
# Use setuptools install_requires, not
|
||||
# broken distutils requires
|
||||
"install_requires": ("metadata", "requires_dist"),
|
||||
"tests_require": ("metadata",),
|
||||
"setup_requires": ("metadata",),
|
||||
"provides": ("metadata", "provides_dist"), # **
|
||||
"obsoletes": ("metadata", "obsoletes_dist"), # **
|
||||
"package_dir": ("files", 'packages_root'),
|
||||
"packages": ("files",),
|
||||
"package_data": ("files",),
|
||||
"data_files": ("files",),
|
||||
"scripts": ("files",),
|
||||
"namespace_packages": ("files",),
|
||||
"py_modules": ("files", "modules"), # **
|
||||
"cmdclass": ("global", "commands"),
|
||||
"use_2to3": ("metadata",),
|
||||
"zip_safe": ("metadata",),
|
||||
}
|
||||
|
||||
# setup() arguments that can have multiple values in setup.cfg
|
||||
MULTI_FIELDS = ("classifiers",
|
||||
"platforms",
|
||||
"install_requires",
|
||||
"provides",
|
||||
"obsoletes",
|
||||
"packages",
|
||||
"namespace_packages",
|
||||
"package_data",
|
||||
"data_files",
|
||||
"scripts",
|
||||
"py_modules",
|
||||
"tests_require",
|
||||
"setup_requires",
|
||||
"cmdclass")
|
||||
|
||||
# setup() arguments that contain boolean values
|
||||
BOOL_FIELDS = ("use_2to3", "zip_safe")
|
||||
|
||||
|
||||
CSV_FIELDS = ("keywords",)
|
||||
|
||||
|
||||
log.set_verbosity(log.INFO)
|
||||
|
||||
|
||||
def resolve_name(name):
|
||||
"""Resolve a name like ``module.object`` to an object and return it.
|
||||
|
||||
Raise ImportError if the module or name is not found.
|
||||
"""
|
||||
|
||||
parts = name.split('.')
|
||||
cursor = len(parts) - 1
|
||||
module_name = parts[:cursor]
|
||||
attr_name = parts[-1]
|
||||
|
||||
while cursor > 0:
|
||||
try:
|
||||
ret = __import__('.'.join(module_name), fromlist=[attr_name])
|
||||
break
|
||||
except ImportError:
|
||||
if cursor == 0:
|
||||
raise
|
||||
cursor -= 1
|
||||
module_name = parts[:cursor]
|
||||
attr_name = parts[cursor]
|
||||
ret = ''
|
||||
|
||||
for part in parts[cursor:]:
|
||||
try:
|
||||
ret = getattr(ret, part)
|
||||
except AttributeError:
|
||||
raise ImportError(name)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def cfg_to_args(path='setup.cfg'):
|
||||
""" Distutils2 to distutils1 compatibility util.
|
||||
|
||||
This method uses an existing setup.cfg to generate a dictionary of
|
||||
keywords that can be used by distutils.core.setup(kwargs**).
|
||||
|
||||
:param file:
|
||||
The setup.cfg path.
|
||||
:raises DistutilsFileError:
|
||||
When the setup.cfg file is not found.
|
||||
|
||||
"""
|
||||
|
||||
# The method source code really starts here.
|
||||
parser = RawConfigParser()
|
||||
if not os.path.exists(path):
|
||||
raise DistutilsFileError("file '%s' does not exist" %
|
||||
os.path.abspath(path))
|
||||
parser.read(path)
|
||||
config = {}
|
||||
for section in parser.sections():
|
||||
config[section] = dict(parser.items(section))
|
||||
|
||||
# Run setup_hooks, if configured
|
||||
setup_hooks = has_get_option(config, 'global', 'setup_hooks')
|
||||
package_dir = has_get_option(config, 'files', 'packages_root')
|
||||
|
||||
# Add the source package directory to sys.path in case it contains
|
||||
# additional hooks, and to make sure it's on the path before any existing
|
||||
# installations of the package
|
||||
if package_dir:
|
||||
package_dir = os.path.abspath(package_dir)
|
||||
sys.path.insert(0, package_dir)
|
||||
|
||||
try:
|
||||
if setup_hooks:
|
||||
setup_hooks = split_multiline(setup_hooks)
|
||||
for hook in setup_hooks:
|
||||
hook_fn = resolve_name(hook)
|
||||
try:
|
||||
hook_fn(config)
|
||||
except:
|
||||
e = sys.exc_info()[1]
|
||||
log.error('setup hook %s raised exception: %s\n' %
|
||||
(hook, e))
|
||||
log.error(traceback.format_exc())
|
||||
sys.exit(1)
|
||||
|
||||
kwargs = setup_cfg_to_setup_kwargs(config)
|
||||
|
||||
register_custom_compilers(config)
|
||||
|
||||
ext_modules = get_extension_modules(config)
|
||||
if ext_modules:
|
||||
kwargs['ext_modules'] = ext_modules
|
||||
|
||||
entry_points = get_entry_points(config)
|
||||
if entry_points:
|
||||
kwargs['entry_points'] = entry_points
|
||||
|
||||
wrap_commands(kwargs)
|
||||
|
||||
# Handle the [files]/extra_files option
|
||||
extra_files = has_get_option(config, 'files', 'extra_files')
|
||||
if extra_files:
|
||||
extra_files = split_multiline(extra_files)
|
||||
# Let's do a sanity check
|
||||
for filename in extra_files:
|
||||
if not os.path.exists(filename):
|
||||
raise DistutilsFileError(
|
||||
'%s from the extra_files option in setup.cfg does not '
|
||||
'exist' % filename)
|
||||
|
||||
# Unfortunately the only really sensible way to do this is to
|
||||
# monkey-patch the manifest_maker class
|
||||
@monkeypatch_method(manifest_maker)
|
||||
def add_defaults(self, extra_files=extra_files, log=log):
|
||||
log.info('[oslo.packaging] running patched manifest_maker'
|
||||
' command with extra_files support')
|
||||
add_defaults._orig(self)
|
||||
self.filelist.extend(extra_files)
|
||||
|
||||
finally:
|
||||
# Perform cleanup if any paths were added to sys.path
|
||||
if package_dir:
|
||||
sys.path.pop(0)
|
||||
|
||||
return kwargs
|
||||
|
||||
|
||||
def filtered_args(path='setup.cfg'):
|
||||
"""Process and pass on the attrs dict."""
|
||||
return packaging.attr_filter(cfg_to_args(path))
|
||||
|
||||
|
||||
def setup_cfg_to_setup_kwargs(config):
|
||||
"""Processes the setup.cfg options and converts them to arguments accepted
|
||||
by setuptools' setup() function.
|
||||
"""
|
||||
|
||||
kwargs = {}
|
||||
|
||||
for arg in D1_D2_SETUP_ARGS:
|
||||
if len(D1_D2_SETUP_ARGS[arg]) == 2:
|
||||
# The distutils field name is different than distutils2's.
|
||||
section, option = D1_D2_SETUP_ARGS[arg]
|
||||
|
||||
elif len(D1_D2_SETUP_ARGS[arg]) == 1:
|
||||
# The distutils field name is the same thant distutils2's.
|
||||
section = D1_D2_SETUP_ARGS[arg][0]
|
||||
option = arg
|
||||
|
||||
in_cfg_value = has_get_option(config, section, option)
|
||||
if not in_cfg_value:
|
||||
|
||||
# There is no such option in the setup.cfg
|
||||
if arg == "long_description":
|
||||
in_cfg_value = has_get_option(config, section,
|
||||
"description_file")
|
||||
if in_cfg_value:
|
||||
in_cfg_value = split_multiline(in_cfg_value)
|
||||
value = ''
|
||||
for filename in in_cfg_value:
|
||||
description_file = open(filename)
|
||||
try:
|
||||
value += description_file.read().strip() + '\n\n'
|
||||
finally:
|
||||
description_file.close()
|
||||
in_cfg_value = value
|
||||
else:
|
||||
continue
|
||||
|
||||
if arg in CSV_FIELDS:
|
||||
in_cfg_value = split_csv(in_cfg_value)
|
||||
if arg in MULTI_FIELDS:
|
||||
in_cfg_value = split_multiline(in_cfg_value)
|
||||
elif arg in BOOL_FIELDS:
|
||||
# Provide some flexibility here...
|
||||
if in_cfg_value.lower() in ('true', 't', '1', 'yes', 'y'):
|
||||
in_cfg_value = True
|
||||
else:
|
||||
in_cfg_value = False
|
||||
|
||||
if in_cfg_value:
|
||||
if arg in ('install_requires', 'tests_require'):
|
||||
# Replaces PEP345-style version specs with the sort expected by
|
||||
# setuptools
|
||||
in_cfg_value = [_VERSION_SPEC_RE.sub(r'\1\2', pred)
|
||||
for pred in in_cfg_value]
|
||||
elif arg == 'package_dir':
|
||||
in_cfg_value = {'': in_cfg_value}
|
||||
elif arg in ('package_data', 'data_files'):
|
||||
data_files = {}
|
||||
firstline = True
|
||||
prev = None
|
||||
for line in in_cfg_value:
|
||||
if '=' in line:
|
||||
key, value = line.split('=', 1)
|
||||
key, value = (key.strip(), value.strip())
|
||||
if key in data_files:
|
||||
# Multiple duplicates of the same package name;
|
||||
# this is for backwards compatibility of the old
|
||||
# format prior to d2to1 0.2.6.
|
||||
prev = data_files[key]
|
||||
prev.extend(value.split())
|
||||
else:
|
||||
prev = data_files[key.strip()] = value.split()
|
||||
elif firstline:
|
||||
raise DistutilsOptionError(
|
||||
'malformed package_data first line %r (misses '
|
||||
'"=")' % line)
|
||||
else:
|
||||
prev.extend(line.strip().split())
|
||||
firstline = False
|
||||
if arg == 'data_files':
|
||||
# the data_files value is a pointlessly different structure
|
||||
# from the package_data value
|
||||
data_files = data_files.items()
|
||||
in_cfg_value = data_files
|
||||
elif arg == 'cmdclass':
|
||||
cmdclass = {}
|
||||
dist = Distribution()
|
||||
for cls in in_cfg_value:
|
||||
cls = resolve_name(cls)
|
||||
cmd = cls(dist)
|
||||
cmdclass[cmd.get_command_name()] = cls
|
||||
in_cfg_value = cmdclass
|
||||
|
||||
kwargs[arg] = in_cfg_value
|
||||
|
||||
return kwargs
|
||||
|
||||
|
||||
def register_custom_compilers(config):
|
||||
"""Handle custom compilers; this has no real equivalent in distutils, where
|
||||
additional compilers could only be added programmatically, so we have to
|
||||
hack it in somehow.
|
||||
"""
|
||||
|
||||
compilers = has_get_option(config, 'global', 'compilers')
|
||||
if compilers:
|
||||
compilers = split_multiline(compilers)
|
||||
for compiler in compilers:
|
||||
compiler = resolve_name(compiler)
|
||||
|
||||
# In distutils2 compilers these class attributes exist; for
|
||||
# distutils1 we just have to make something up
|
||||
if hasattr(compiler, 'name'):
|
||||
name = compiler.name
|
||||
else:
|
||||
name = compiler.__name__
|
||||
if hasattr(compiler, 'description'):
|
||||
desc = compiler.description
|
||||
else:
|
||||
desc = 'custom compiler %s' % name
|
||||
|
||||
module_name = compiler.__module__
|
||||
# Note; this *will* override built in compilers with the same name
|
||||
# TODO: Maybe display a warning about this?
|
||||
cc = distutils.ccompiler.compiler_class
|
||||
cc[name] = (module_name, compiler.__name__, desc)
|
||||
|
||||
# HACK!!!! Distutils assumes all compiler modules are in the
|
||||
# distutils package
|
||||
sys.modules['distutils.' + module_name] = sys.modules[module_name]
|
||||
|
||||
|
||||
def get_extension_modules(config):
|
||||
"""Handle extension modules"""
|
||||
|
||||
EXTENSION_FIELDS = ("sources",
|
||||
"include_dirs",
|
||||
"define_macros",
|
||||
"undef_macros",
|
||||
"library_dirs",
|
||||
"libraries",
|
||||
"runtime_library_dirs",
|
||||
"extra_objects",
|
||||
"extra_compile_args",
|
||||
"extra_link_args",
|
||||
"export_symbols",
|
||||
"swig_opts",
|
||||
"depends")
|
||||
|
||||
ext_modules = []
|
||||
for section in config:
|
||||
if ':' in section:
|
||||
labels = section.split(':', 1)
|
||||
else:
|
||||
# Backwards compatibility for old syntax; don't use this though
|
||||
labels = section.split('=', 1)
|
||||
labels = [l.strip() for l in labels]
|
||||
if (len(labels) == 2) and (labels[0] == 'extension'):
|
||||
ext_args = {}
|
||||
for field in EXTENSION_FIELDS:
|
||||
value = has_get_option(config, section, field)
|
||||
# All extension module options besides name can have multiple
|
||||
# values
|
||||
if not value:
|
||||
continue
|
||||
value = split_multiline(value)
|
||||
if field == 'define_macros':
|
||||
macros = []
|
||||
for macro in value:
|
||||
macro = macro.split('=', 1)
|
||||
if len(macro) == 1:
|
||||
macro = (macro[0].strip(), None)
|
||||
else:
|
||||
macro = (macro[0].strip(), macro[1].strip())
|
||||
macros.append(macro)
|
||||
value = macros
|
||||
ext_args[field] = value
|
||||
if ext_args:
|
||||
if 'name' not in ext_args:
|
||||
ext_args['name'] = labels[1]
|
||||
ext_modules.append(Extension(ext_args.pop('name'),
|
||||
**ext_args))
|
||||
return ext_modules
|
||||
|
||||
|
||||
def get_entry_points(config):
|
||||
"""Process the [entry_points] section of setup.cfg to handle setuptools
|
||||
entry points. This is, of course, not a standard feature of
|
||||
distutils2/packaging, but as there is not currently a standard alternative
|
||||
in packaging, we provide support for them.
|
||||
"""
|
||||
|
||||
if not 'entry_points' in config:
|
||||
return {}
|
||||
|
||||
return dict((option, split_multiline(value))
|
||||
for option, value in config['entry_points'].items())
|
||||
|
||||
|
||||
def wrap_commands(kwargs):
|
||||
dist = Distribution()
|
||||
|
||||
# This should suffice to get the same config values and command classes
|
||||
# that the actual Distribution will see (not counting cmdclass, which is
|
||||
# handled below)
|
||||
dist.parse_config_files()
|
||||
|
||||
for cmd, _ in dist.get_command_list():
|
||||
hooks = {}
|
||||
for opt, val in dist.get_option_dict(cmd).items():
|
||||
val = val[1]
|
||||
if opt.startswith('pre_hook.') or opt.startswith('post_hook.'):
|
||||
hook_type, alias = opt.split('.', 1)
|
||||
hook_dict = hooks.setdefault(hook_type, {})
|
||||
hook_dict[alias] = val
|
||||
if not hooks:
|
||||
continue
|
||||
|
||||
if 'cmdclass' in kwargs and cmd in kwargs['cmdclass']:
|
||||
cmdclass = kwargs['cmdclass'][cmd]
|
||||
else:
|
||||
cmdclass = dist.get_command_class(cmd)
|
||||
|
||||
new_cmdclass = wrap_command(cmd, cmdclass, hooks)
|
||||
kwargs.setdefault('cmdclass', {})[cmd] = new_cmdclass
|
||||
|
||||
|
||||
def wrap_command(cmd, cmdclass, hooks):
|
||||
def run(self, cmdclass=cmdclass):
|
||||
self.run_command_hooks('pre_hook')
|
||||
cmdclass.run(self)
|
||||
self.run_command_hooks('post_hook')
|
||||
|
||||
return type(cmd, (cmdclass, object),
|
||||
{'run': run, 'run_command_hooks': run_command_hooks,
|
||||
'pre_hook': hooks.get('pre_hook'),
|
||||
'post_hook': hooks.get('post_hook')})
|
||||
|
||||
|
||||
def run_command_hooks(cmd_obj, hook_kind):
|
||||
"""Run hooks registered for that command and phase.
|
||||
|
||||
*cmd_obj* is a finalized command object; *hook_kind* is either
|
||||
'pre_hook' or 'post_hook'.
|
||||
"""
|
||||
|
||||
if hook_kind not in ('pre_hook', 'post_hook'):
|
||||
raise ValueError('invalid hook kind: %r' % hook_kind)
|
||||
|
||||
hooks = getattr(cmd_obj, hook_kind, None)
|
||||
|
||||
if hooks is None:
|
||||
return
|
||||
|
||||
for hook in hooks.values():
|
||||
if isinstance(hook, str):
|
||||
try:
|
||||
hook_obj = resolve_name(hook)
|
||||
except ImportError:
|
||||
err = sys.exc_info()[1] # For py3k
|
||||
raise DistutilsModuleError('cannot find hook %s: %s' %
|
||||
(hook, err))
|
||||
else:
|
||||
hook_obj = hook
|
||||
|
||||
if not hasattr(hook_obj, '__call__'):
|
||||
raise DistutilsOptionError('hook %r is not callable' % hook)
|
||||
|
||||
log.info('running %s %s for command %s',
|
||||
hook_kind, hook, cmd_obj.get_command_name())
|
||||
|
||||
try:
|
||||
hook_obj(cmd_obj)
|
||||
except:
|
||||
e = sys.exc_info()[1]
|
||||
log.error('hook %s raised exception: %s\n' % (hook, e))
|
||||
log.error(traceback.format_exc())
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def has_get_option(config, section, option):
|
||||
if section in config and option in config[section]:
|
||||
return config[section][option]
|
||||
elif section in config and option.replace('_', '-') in config[section]:
|
||||
return config[section][option.replace('_', '-')]
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def split_multiline(value):
|
||||
"""Special behaviour when we have a multi line options"""
|
||||
|
||||
value = [element for element in
|
||||
(line.strip() for line in value.split('\n'))
|
||||
if element]
|
||||
return value
|
||||
|
||||
|
||||
def split_csv(value):
|
||||
"""Special behaviour when we have a comma separated options"""
|
||||
|
||||
value = [element for element in
|
||||
(chunk.strip() for chunk in value.split(','))
|
||||
if element]
|
||||
return value
|
||||
|
||||
|
||||
def monkeypatch_method(cls):
|
||||
"""A function decorator to monkey-patch a method of the same name on the
|
||||
given class.
|
||||
"""
|
||||
|
||||
def wrapper(func):
|
||||
orig = getattr(cls, func.__name__, None)
|
||||
if orig and not hasattr(orig, '_orig'): # Already patched
|
||||
setattr(func, '_orig', orig)
|
||||
setattr(cls, func.__name__, func)
|
||||
return func
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
# The following classes are used to hack Distribution.command_options a bit
|
||||
class DefaultGetDict(defaultdict):
|
||||
"""Like defaultdict, but the get() method also sets and returns the default
|
||||
value.
|
||||
"""
|
||||
|
||||
def get(self, key, default=None):
|
||||
if default is None:
|
||||
default = self.default_factory()
|
||||
return super(DefaultGetDict, self).setdefault(key, default)
|
||||
|
||||
|
||||
class IgnoreDict(dict):
|
||||
"""A dictionary that ignores any insertions in which the key is a string
|
||||
matching any string in `ignore`. The ignore list can also contain wildcard
|
||||
patterns using '*'.
|
||||
"""
|
||||
|
||||
def __init__(self, ignore):
|
||||
self.__ignore = re.compile(r'(%s)' % ('|'.join(
|
||||
[pat.replace('*', '.*')
|
||||
for pat in ignore])))
|
||||
|
||||
def __setitem__(self, key, val):
|
||||
if self.__ignore.match(key):
|
||||
return
|
||||
super(IgnoreDict, self).__setitem__(key, val)
|
@@ -22,8 +22,6 @@ packages =
namespace_packages =
    oslo

[entry_points]
distutils.setup_keywords =
    oslo_packaging = oslo.packaging.core:setup
oslo.packaging.attr_filters =
    oslo_packaging = oslo.packaging.packaging:attr_filter
[global]
setup-hooks =
    oslo.packaging.hooks.setup_hook

setup.py (10 lines changed)
@@ -16,10 +16,6 @@

import setuptools

# See setup.cfg for the project metadata.
from oslo.packaging import util


# Use our internals directly, so that we don't chicken-and-egg needing to
# install an entry point before using ourself.
setuptools.setup(**util.filtered_args())
setuptools.setup(
    setup_requires = ['d2to1'],
    d2to1 = True)
