Make initial structural changes to keystoneclient in preparation for
moving auth_token here from keystone. No functional change should occur
from this commit (even though it does refresh a newer copy of
openstack.common.setup.py, none of the newer updates are in functions
called from this client).

blueprint authtoken-to-keystoneclient-repo
Change-Id: Ie54feb73e0e34b56400fdaa8a8f876f9284bac27
parent 4ed4fb4d49
commit 86acb5657e
File diff suppressed because it is too large
keystoneclient/openstack/common/iniparser.py (new file)
@@ -0,0 +1,130 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2012 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


class ParseError(Exception):
    def __init__(self, message, lineno, line):
        self.msg = message
        self.line = line
        self.lineno = lineno

    def __str__(self):
        return 'at line %d, %s: %r' % (self.lineno, self.msg, self.line)


class BaseParser(object):
    lineno = 0
    parse_exc = ParseError

    def _assignment(self, key, value):
        self.assignment(key, value)
        return None, []

    def _get_section(self, line):
        if line[-1] != ']':
            return self.error_no_section_end_bracket(line)
        if len(line) <= 2:
            return self.error_no_section_name(line)

        return line[1:-1]

    def _split_key_value(self, line):
        colon = line.find(':')
        equal = line.find('=')
        if colon < 0 and equal < 0:
            return self.error_invalid_assignment(line)

        if colon < 0 or (equal >= 0 and equal < colon):
            key, value = line[:equal], line[equal + 1:]
        else:
            key, value = line[:colon], line[colon + 1:]

        value = value.strip()
        if ((value and value[0] == value[-1]) and
                (value[0] == "\"" or value[0] == "'")):
            value = value[1:-1]
        return key.strip(), [value]

    def parse(self, lineiter):
        key = None
        value = []

        for line in lineiter:
            self.lineno += 1

            line = line.rstrip()
            if not line:
                # Blank line, ends multi-line values
                if key:
                    key, value = self._assignment(key, value)
                continue
            elif line[0] in (' ', '\t'):
                # Continuation of previous assignment
                if key is None:
                    self.error_unexpected_continuation(line)
                else:
                    value.append(line.lstrip())
                continue

            if key:
                # Flush previous assignment, if any
                key, value = self._assignment(key, value)

            if line[0] == '[':
                # Section start
                section = self._get_section(line)
                if section:
                    self.new_section(section)
            elif line[0] in '#;':
                self.comment(line[1:].lstrip())
            else:
                key, value = self._split_key_value(line)
                if not key:
                    return self.error_empty_key(line)

        if key:
            # Flush previous assignment, if any
            self._assignment(key, value)

    def assignment(self, key, value):
        """Called when a full assignment is parsed"""
        raise NotImplementedError()

    def new_section(self, section):
        """Called when a new section is started"""
        raise NotImplementedError()

    def comment(self, comment):
        """Called when a comment is parsed"""
        pass

    def error_invalid_assignment(self, line):
        raise self.parse_exc("No ':' or '=' found in assignment",
                             self.lineno, line)

    def error_empty_key(self, line):
        raise self.parse_exc('Key cannot be empty', self.lineno, line)

    def error_unexpected_continuation(self, line):
        raise self.parse_exc('Unexpected continuation line',
                             self.lineno, line)

    def error_no_section_end_bracket(self, line):
        raise self.parse_exc('Invalid section (must end with ])',
                             self.lineno, line)

    def error_no_section_name(self, line):
        raise self.parse_exc('Empty section name', self.lineno, line)
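For context on how this new module is meant to be consumed: BaseParser is a template class, callers subclass it and implement the assignment() and new_section() hooks while parse() drives the line-by-line state machine. A minimal sketch of such a subclass follows; DictParser and the sample input are illustrative only and not part of this change, and the import path assumes the module lands under keystoneclient.openstack.common as configured at the bottom of this change.

# Illustrative sketch -- not part of this commit.
from keystoneclient.openstack.common import iniparser


class DictParser(iniparser.BaseParser):
    """Collect parsed assignments into a dict of sections."""

    def __init__(self):
        self.sections = {'DEFAULT': {}}
        self._current = self.sections['DEFAULT']

    def new_section(self, section):
        self._current = self.sections.setdefault(section, {})

    def assignment(self, key, value):
        # 'value' arrives as a list of lines; multi-line values are joined.
        self._current[key] = '\n'.join(value)


parser = DictParser()
parser.parse(iter(['[DEFAULT]', 'modules = setup, iniparser', '']))
assert parser.sections['DEFAULT']['modules'] == 'setup, iniparser'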
keystoneclient/openstack/common/jsonutils.py (new file)
@@ -0,0 +1,148 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

'''
JSON related utilities.

This module provides a few things:

    1) A handy function for getting an object down to something that can be
    JSON serialized. See to_primitive().

    2) Wrappers around loads() and dumps(). The dumps() wrapper will
    automatically use to_primitive() for you if needed.

    3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
    is available.
'''


import datetime
import inspect
import itertools
import json
import xmlrpclib

from keystoneclient.openstack.common import timeutils


def to_primitive(value, convert_instances=False, level=0):
    """Convert a complex object into primitives.

    Handy for JSON serialization. We can optionally handle instances,
    but since this is a recursive function, we could have cyclical
    data structures.

    To handle cyclical data structures we could track the actual objects
    visited in a set, but not all objects are hashable. Instead we just
    track the depth of the object inspections and don't go too deep.

    Therefore, convert_instances=True is lossy ... be aware.

    """
    nasty = [inspect.ismodule, inspect.isclass, inspect.ismethod,
             inspect.isfunction, inspect.isgeneratorfunction,
             inspect.isgenerator, inspect.istraceback, inspect.isframe,
             inspect.iscode, inspect.isbuiltin, inspect.isroutine,
             inspect.isabstract]
    for test in nasty:
        if test(value):
            return unicode(value)

    # value of itertools.count doesn't get caught by inspects
    # above and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return unicode(value)

    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
    #              has a @wrap_exception with a notifier will fail. If
    #              we up the dependency to 0.5.4 (when it is released) we
    #              can remove this workaround.
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'

    if level > 3:
        return '?'

    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        # It's not clear why xmlrpclib created their own DateTime type, but
        # for our purposes, make it a datetime type which is explicitly
        # handled
        if isinstance(value, xmlrpclib.DateTime):
            value = datetime.datetime(*tuple(value.timetuple())[:6])

        if isinstance(value, (list, tuple)):
            o = []
            for v in value:
                o.append(to_primitive(v, convert_instances=convert_instances,
                                      level=level))
            return o
        elif isinstance(value, dict):
            o = {}
            for k, v in value.iteritems():
                o[k] = to_primitive(v, convert_instances=convert_instances,
                                    level=level)
            return o
        elif isinstance(value, datetime.datetime):
            return timeutils.strtime(value)
        elif hasattr(value, 'iteritems'):
            return to_primitive(dict(value.iteritems()),
                                convert_instances=convert_instances,
                                level=level + 1)
        elif hasattr(value, '__iter__'):
            return to_primitive(list(value),
                                convert_instances=convert_instances,
                                level=level)
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return to_primitive(value.__dict__,
                                convert_instances=convert_instances,
                                level=level + 1)
        else:
            return value
    except TypeError, e:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return unicode(value)


def dumps(value, default=to_primitive, **kwargs):
    return json.dumps(value, default=default, **kwargs)


def loads(s):
    return json.loads(s)


def load(s):
    return json.load(s)


try:
    import anyjson
except ImportError:
    pass
else:
    anyjson._modules.append((__name__, 'dumps', TypeError,
                             'loads', ValueError, 'load'))
    anyjson.force_implementation(__name__)
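The practical effect for client code is that dumps() can serialize objects plain json.dumps() rejects, because to_primitive() is passed as the default hook. An illustrative call, Python 2 style to match the code above; the token dict is made up and the import path assumes the sync configuration at the bottom of this change.

# Illustrative sketch -- not part of this commit.
import datetime

from keystoneclient.openstack.common import jsonutils

token = {'id': 'abc123',
         'expires': datetime.datetime(2012, 6, 1, 12, 0, 0)}
# json.dumps(token) would raise TypeError on the datetime; jsonutils.dumps
# routes it through to_primitive(), which calls timeutils.strtime().
print jsonutils.dumps(token)
# e.g. {"id": "abc123", "expires": "2012-06-01T12:00:00.000000"}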
keystoneclient/openstack/common/setup.py
@@ -31,12 +31,13 @@ from setuptools.command import sdist
 def parse_mailmap(mailmap='.mailmap'):
     mapping = {}
     if os.path.exists(mailmap):
-        fp = open(mailmap, 'r')
-        for l in fp:
-            l = l.strip()
-            if not l.startswith('#') and ' ' in l:
-                canonical_email, alias = l.split(' ')
-                mapping[alias] = canonical_email
+        with open(mailmap, 'r') as fp:
+            for l in fp:
+                l = l.strip()
+                if not l.startswith('#') and ' ' in l:
+                    canonical_email, alias = [x for x in l.split(' ')
+                                              if x.startswith('<')]
+                    mapping[alias] = canonical_email
     return mapping
 
 
@@ -51,10 +52,10 @@ def canonicalize_emails(changelog, mapping):
 
 # Get requirements from the first file that exists
 def get_reqs_from_files(requirements_files):
-    reqs_in = []
     for requirements_file in requirements_files:
         if os.path.exists(requirements_file):
-            return open(requirements_file, 'r').read().split('\n')
+            with open(requirements_file, 'r') as fil:
+                return fil.read().split('\n')
     return []
 
 
@@ -139,11 +140,19 @@ def _get_git_next_version_suffix(branch_name):
     _run_shell_command("git fetch origin +refs/meta/*:refs/remotes/meta/*")
     milestone_cmd = "git show meta/openstack/release:%s" % branch_name
     milestonever = _run_shell_command(milestone_cmd)
-    if not milestonever:
-        milestonever = ""
+    if milestonever:
+        first_half = "%s~%s" % (milestonever, datestamp)
+    else:
+        first_half = datestamp
+
     post_version = _get_git_post_version()
-    revno = post_version.split(".")[-1]
-    return "%s~%s.%s%s" % (milestonever, datestamp, revno_prefix, revno)
+    # post version should look like:
+    # 0.1.1.4.gcc9e28a
+    # where the bit after the last . is the short sha, and the bit between
+    # the last and second to last is the revno count
+    (revno, sha) = post_version.split(".")[-2:]
+    second_half = "%s%s.%s" % (revno_prefix, revno, sha)
+    return ".".join((first_half, second_half))
 
 
 def _get_git_current_tag():
@@ -165,39 +174,48 @@ def _get_git_post_version():
             cmd = "git --no-pager log --oneline"
             out = _run_shell_command(cmd)
             revno = len(out.split("\n"))
+            sha = _run_shell_command("git describe --always")
         else:
             tag_infos = tag_info.split("-")
             base_version = "-".join(tag_infos[:-2])
-            revno = tag_infos[-2]
-        return "%s.%s" % (base_version, revno)
+            (revno, sha) = tag_infos[-2:]
+        return "%s.%s.%s" % (base_version, revno, sha)
 
 
 def write_git_changelog():
     """Write a changelog based on the git changelog."""
-    if os.path.isdir('.git'):
-        git_log_cmd = 'git log --stat'
-        changelog = _run_shell_command(git_log_cmd)
-        mailmap = parse_mailmap()
-        with open("ChangeLog", "w") as changelog_file:
-            changelog_file.write(canonicalize_emails(changelog, mailmap))
+    new_changelog = 'ChangeLog'
+    if not os.getenv('SKIP_WRITE_GIT_CHANGELOG'):
+        if os.path.isdir('.git'):
+            git_log_cmd = 'git log --stat'
+            changelog = _run_shell_command(git_log_cmd)
+            mailmap = parse_mailmap()
+            with open(new_changelog, "w") as changelog_file:
+                changelog_file.write(canonicalize_emails(changelog, mailmap))
+    else:
+        open(new_changelog, 'w').close()
 
 
 def generate_authors():
     """Create AUTHORS file using git commits."""
-    jenkins_email = 'jenkins@review.openstack.org'
+    jenkins_email = 'jenkins@review.(openstack|stackforge).org'
     old_authors = 'AUTHORS.in'
     new_authors = 'AUTHORS'
-    if os.path.isdir('.git'):
-        # don't include jenkins email address in AUTHORS file
-        git_log_cmd = ("git log --format='%aN <%aE>' | sort -u | "
-                       "grep -v " + jenkins_email)
-        changelog = _run_shell_command(git_log_cmd)
-        mailmap = parse_mailmap()
-        with open(new_authors, 'w') as new_authors_fh:
-            new_authors_fh.write(canonicalize_emails(changelog, mailmap))
-            if os.path.exists(old_authors):
-                with open(old_authors, "r") as old_authors_fh:
-                    new_authors_fh.write('\n' + old_authors_fh.read())
+    if not os.getenv('SKIP_GENERATE_AUTHORS'):
+        if os.path.isdir('.git'):
+            # don't include jenkins email address in AUTHORS file
+            git_log_cmd = ("git log --format='%aN <%aE>' | sort -u | "
+                           "egrep -v '" + jenkins_email + "'")
+            changelog = _run_shell_command(git_log_cmd)
+            mailmap = parse_mailmap()
+            with open(new_authors, 'w') as new_authors_fh:
+                new_authors_fh.write(canonicalize_emails(changelog, mailmap))
+                if os.path.exists(old_authors):
+                    with open(old_authors, "r") as old_authors_fh:
+                        new_authors_fh.write('\n' + old_authors_fh.read())
+    else:
+        open(new_authors, 'w').close()
 
 
 _rst_template = """%(heading)s
 %(underline)s
@@ -211,7 +229,7 @@ _rst_template = """%(heading)s
 
 def read_versioninfo(project):
     """Read the versioninfo file. If it doesn't exist, we're in a github
-       zipball, and there's really know way to know what version we really
+       zipball, and there's really no way to know what version we really
        are, but that should be ok, because the utility of that should be
        just about nil if this code path is in use in the first place."""
     versioninfo_path = os.path.join(project, 'versioninfo')
@@ -225,7 +243,8 @@ def read_versioninfo(project):
 
 def write_versioninfo(project, version):
     """Write a simple file containing the version of the package."""
-    open(os.path.join(project, 'versioninfo'), 'w').write("%s\n" % version)
+    with open(os.path.join(project, 'versioninfo'), 'w') as fil:
+        fil.write("%s\n" % version)
 
 
 def get_cmdclass():
@@ -316,7 +335,8 @@ def get_git_branchname():
 
 
 def get_pre_version(projectname, base_version):
-    """Return a version which is based"""
+    """Return a version which is leading up to a version that will
+       be released in the future."""
     if os.path.isdir('.git'):
         current_tag = _get_git_current_tag()
         if current_tag is not None:
@@ -328,10 +348,10 @@ def get_pre_version(projectname, base_version):
             version_suffix = _get_git_next_version_suffix(branch_name)
             version = "%s~%s" % (base_version, version_suffix)
             write_versioninfo(projectname, version)
-            return version.split('~')[0]
+            return version
     else:
         version = read_versioninfo(projectname)
-    return version.split('~')[0]
+    return version
 
 
 def get_post_version(projectname):
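To make the version-suffix change above easier to review: the refreshed code builds the pre-version string from a milestone/date half and a revno/sha half instead of dropping the sha. The snippet below only mirrors that string composition with made-up values; it is not a call into the real helpers, which shell out to git.

# Illustrative values only -- not part of this commit.
milestonever, datestamp = "folsom-1", "20120601"
revno_prefix, revno, sha = "", "123", "gcc9e28a"

first_half = "%s~%s" % (milestonever, datestamp) if milestonever else datestamp
second_half = "%s%s.%s" % (revno_prefix, revno, sha)
print ".".join((first_half, second_half))
# e.g. folsom-1~20120601.123.gcc9e28a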
keystoneclient/openstack/common/timeutils.py (new file)
@@ -0,0 +1,137 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Time related utilities and helper functions.
"""

import calendar
import datetime

import iso8601


TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
PERFECT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"


def isotime(at=None):
    """Stringify time in ISO 8601 format"""
    if not at:
        at = utcnow()
    str = at.strftime(TIME_FORMAT)
    tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
    str += ('Z' if tz == 'UTC' else tz)
    return str


def parse_isotime(timestr):
    """Parse time from ISO 8601 format"""
    try:
        return iso8601.parse_date(timestr)
    except iso8601.ParseError as e:
        raise ValueError(e.message)
    except TypeError as e:
        raise ValueError(e.message)


def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
    """Returns formatted utcnow."""
    if not at:
        at = utcnow()
    return at.strftime(fmt)


def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
    """Turn a formatted time back into a datetime."""
    return datetime.datetime.strptime(timestr, fmt)


def normalize_time(timestamp):
    """Normalize time in arbitrary timezone to UTC naive object"""
    offset = timestamp.utcoffset()
    if offset is None:
        return timestamp
    return timestamp.replace(tzinfo=None) - offset


def is_older_than(before, seconds):
    """Return True if before is older than seconds."""
    return utcnow() - before > datetime.timedelta(seconds=seconds)


def is_newer_than(after, seconds):
    """Return True if after is newer than seconds."""
    return after - utcnow() > datetime.timedelta(seconds=seconds)


def utcnow_ts():
    """Timestamp version of our utcnow function."""
    return calendar.timegm(utcnow().timetuple())


def utcnow():
    """Overridable version of utils.utcnow."""
    if utcnow.override_time:
        return utcnow.override_time
    return datetime.datetime.utcnow()


utcnow.override_time = None


def set_time_override(override_time=datetime.datetime.utcnow()):
    """Override utils.utcnow to return a constant time."""
    utcnow.override_time = override_time


def advance_time_delta(timedelta):
    """Advance overridden time using a datetime.timedelta."""
    assert(not utcnow.override_time is None)
    utcnow.override_time += timedelta


def advance_time_seconds(seconds):
    """Advance overridden time by seconds."""
    advance_time_delta(datetime.timedelta(0, seconds))


def clear_time_override():
    """Remove the overridden time."""
    utcnow.override_time = None


def marshall_now(now=None):
    """Make an rpc-safe datetime with microseconds.

    Note: tzinfo is stripped, but not required for relative times."""
    if not now:
        now = utcnow()
    return dict(day=now.day, month=now.month, year=now.year, hour=now.hour,
                minute=now.minute, second=now.second,
                microsecond=now.microsecond)


def unmarshall_time(tyme):
    """Unmarshall a datetime dict."""
    return datetime.datetime(day=tyme['day'],
                             month=tyme['month'],
                             year=tyme['year'],
                             hour=tyme['hour'],
                             minute=tyme['minute'],
                             second=tyme['second'],
                             microsecond=tyme['microsecond'])
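The override helpers at the bottom of this module exist so tests can freeze and advance the clock instead of sleeping. A short sketch of the intended flow, illustrative only and not part of this change; the import path assumes the sync configuration below.

# Illustrative sketch -- not part of this commit.
import datetime

from keystoneclient.openstack.common import timeutils

timeutils.set_time_override(datetime.datetime(2012, 6, 1, 12, 0, 0))
assert timeutils.utcnow() == datetime.datetime(2012, 6, 1, 12, 0, 0)

timeutils.advance_time_seconds(90)
assert timeutils.utcnow() == datetime.datetime(2012, 6, 1, 12, 1, 30)

timeutils.clear_time_override()  # back to the real clock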
openstack-common.conf
@@ -1,7 +1,7 @@
 [DEFAULT]
 
 # The list of modules to copy from openstack-common
-modules=setup
+modules=setup,cfg,iniparser,jsonutils,timeutils
 
 # The base module to hold the copy of openstack.common
 base=keystoneclient