Perform oslo-incubator code sync.
run_tests.sh cannot install a virtualenv with tools/install_venv.py because
tools/install_venv_common.py depends on oslo.config, which is an external
dependency. The virtualenv installer should not depend on any external
dependencies. Fix this by performing a code sync with oslo-incubator.

Note this adds a new dependency on six and changes the way versioning is
determined.

Change-Id: Ifb67bbfeec4eabc943c2a88a34fe535c57f174a1
parent 811c6c0dd3
commit b7cc2778c6
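
The new six dependency mentioned above comes from the jsonutils sync below, which replaces unicode() with six.text_type(). six.text_type is unicode on Python 2 and str on Python 3, so one spelling covers both interpreters. A minimal sketch (the safe_text helper is mine, not part of the synced code):

    import six

    def safe_text(value):
        # six.text_type is `unicode` on Python 2 and `str` on Python 3,
        # so the same stringification works on both interpreters.
        return six.text_type(value)

    print(safe_text(ValueError("boom")))  # boom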
@@ -3,7 +3,6 @@ include AUTHORS HACKING LICENSE
include ChangeLog
include run_tests.sh tox.ini
include python-keystoneclient
include keystoneclient/versioninfo
recursive-include doc *
recursive-include tests *
recursive-include tools *
@@ -34,15 +34,29 @@ This module provides a few things:
import datetime
import functools
import inspect
import itertools
import json
import types
import xmlrpclib

import six

from keystoneclient.openstack.common import timeutils


def to_primitive(value, convert_instances=False, level=0):
_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
                     inspect.isfunction, inspect.isgeneratorfunction,
                     inspect.isgenerator, inspect.istraceback, inspect.isframe,
                     inspect.iscode, inspect.isbuiltin, inspect.isroutine,
                     inspect.isabstract]

_simple_types = (types.NoneType, int, basestring, bool, float, long)


def to_primitive(value, convert_instances=False, convert_datetime=True,
                 level=0, max_depth=3):
    """Convert a complex object into primitives.

    Handy for JSON serialization. We can optionally handle instances,
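The hunk above hoists the type-test list to module level and adds a _simple_types fast path so common scalar values skip the expensive inspect checks. Roughly, with an illustrative is_simple helper (Python 2 names, matching the synced code):

    import types

    # Mirrors the module-level tuple added above; isinstance() against a
    # tuple is one cheap check instead of a dozen inspect.* calls.
    _simple_types = (types.NoneType, int, basestring, bool, float, long)

    def is_simple(value):
        return isinstance(value, _simple_types)

    # is_simple(42) -> True, is_simple(object()) -> False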
@@ -56,19 +70,32 @@ def to_primitive(value, convert_instances=False, level=0):
    Therefore, convert_instances=True is lossy ... be aware.

    """
    nasty = [inspect.ismodule, inspect.isclass, inspect.ismethod,
             inspect.isfunction, inspect.isgeneratorfunction,
             inspect.isgenerator, inspect.istraceback, inspect.isframe,
             inspect.iscode, inspect.isbuiltin, inspect.isroutine,
             inspect.isabstract]
    for test in nasty:
        if test(value):
            return unicode(value)
    # handle obvious types first - order of basic types determined by running
    # full tests on nova project, resulting in the following counts:
    # 572754 <type 'NoneType'>
    # 460353 <type 'int'>
    # 379632 <type 'unicode'>
    # 274610 <type 'str'>
    # 199918 <type 'dict'>
    # 114200 <type 'datetime.datetime'>
    # 51817 <type 'bool'>
    # 26164 <type 'list'>
    # 6491 <type 'float'>
    # 283 <type 'tuple'>
    # 19 <type 'long'>
    if isinstance(value, _simple_types):
        return value

    # value of itertools.count doesn't get caught by inspects
    # above and results in infinite loop when list(value) is called.
    if isinstance(value, datetime.datetime):
        if convert_datetime:
            return timeutils.strtime(value)
        else:
            return value

    # value of itertools.count doesn't get caught by nasty_type_tests
    # and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return unicode(value)
        return six.text_type(value)

    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
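With the new convert_datetime flag, datetimes are serialized through timeutils.strtime by default or passed through untouched. Assuming strtime defaults to the PERFECT_TIME_FORMAT constant shown in the timeutils hunk further down, the output looks like this:

    import datetime

    # Assumed default format for timeutils.strtime; it matches the
    # PERFECT_TIME_FORMAT constant defined in the timeutils hunk below.
    PERFECT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"

    now = datetime.datetime(2013, 3, 1, 12, 0, 0)
    print(now.strftime(PERFECT_TIME_FORMAT))  # 2013-03-01T12:00:00.000000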
@@ -78,52 +105,46 @@ def to_primitive(value, convert_instances=False, level=0):
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'

    if level > 3:
    if level > max_depth:
        return '?'

    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        recursive = functools.partial(to_primitive,
                                      convert_instances=convert_instances,
                                      convert_datetime=convert_datetime,
                                      level=level,
                                      max_depth=max_depth)
        if isinstance(value, dict):
            return dict((k, recursive(v)) for k, v in value.iteritems())
        elif isinstance(value, (list, tuple)):
            return [recursive(lv) for lv in value]

        # It's not clear why xmlrpclib created their own DateTime type, but
        # for our purposes, make it a datetime type which is explicitly
        # handled
        if isinstance(value, xmlrpclib.DateTime):
            value = datetime.datetime(*tuple(value.timetuple())[:6])

        if isinstance(value, (list, tuple)):
            o = []
            for v in value:
                o.append(to_primitive(v, convert_instances=convert_instances,
                                      level=level))
            return o
        elif isinstance(value, dict):
            o = {}
            for k, v in value.iteritems():
                o[k] = to_primitive(v, convert_instances=convert_instances,
                                    level=level)
            return o
        elif isinstance(value, datetime.datetime):
        if convert_datetime and isinstance(value, datetime.datetime):
            return timeutils.strtime(value)
        elif hasattr(value, 'iteritems'):
            return to_primitive(dict(value.iteritems()),
                                convert_instances=convert_instances,
                                level=level + 1)
            return recursive(dict(value.iteritems()), level=level + 1)
        elif hasattr(value, '__iter__'):
            return to_primitive(list(value),
                                convert_instances=convert_instances,
                                level=level)
            return recursive(list(value))
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return to_primitive(value.__dict__,
                                convert_instances=convert_instances,
                                level=level + 1)
            return recursive(value.__dict__, level=level + 1)
        else:
            if any(test(value) for test in _nasty_type_tests):
                return six.text_type(value)
            return value
    except TypeError, e:
    except TypeError:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return unicode(value)
        return six.text_type(value)


def dumps(value, default=to_primitive, **kwargs):
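The recursion is now funneled through functools.partial, so convert_instances, convert_datetime, level and max_depth are bound once per call instead of being repeated at every recursive call site. The pattern in isolation, with a hypothetical walk() standing in for to_primitive():

    import functools

    def walk(value, depth=0, max_depth=3):
        if depth > max_depth:
            return '?'
        # Bind the keyword arguments once; every recursive call reuses them.
        recurse = functools.partial(walk, depth=depth + 1, max_depth=max_depth)
        if isinstance(value, dict):
            return dict((k, recurse(v)) for k, v in value.items())
        if isinstance(value, (list, tuple)):
            return [recurse(v) for v in value]
        return value

    print(walk({'a': [1, {'b': 2}]}))  # {'a': [1, {'b': 2}]}

Binding the flags in one place also makes it harder to forget to forward one of them when new keyword arguments are added later.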
@@ -1,6 +1,7 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# Copyright 2011 OpenStack Foundation.
# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may

@@ -19,7 +20,9 @@
Utilities with minimum-depends for use in setup.py
"""

import datetime
from __future__ import print_function

import email
import os
import re
import subprocess

@@ -33,20 +36,26 @@ def parse_mailmap(mailmap='.mailmap'):
    if os.path.exists(mailmap):
        with open(mailmap, 'r') as fp:
            for l in fp:
                l = l.strip()
                if not l.startswith('#') and ' ' in l:
                    canonical_email, alias = [x for x in l.split(' ')
                                              if x.startswith('<')]
                    mapping[alias] = canonical_email
                    try:
                        canonical_email, alias = re.match(
                            r'[^#]*?(<.+>).*(<.+>).*', l).groups()
                    except AttributeError:
                        continue
                    mapping[alias] = canonical_email
    return mapping


def _parse_git_mailmap(git_dir, mailmap='.mailmap'):
    mailmap = os.path.join(os.path.dirname(git_dir), mailmap)
    return parse_mailmap(mailmap)


def canonicalize_emails(changelog, mapping):
    """Takes in a string and an email alias mapping and replaces all
    instances of the aliases in the string with their real email.
    """
    for alias, email in mapping.iteritems():
        changelog = changelog.replace(alias, email)
    for alias, email_address in mapping.iteritems():
        changelog = changelog.replace(alias, email_address)
    return changelog
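parse_mailmap now extracts both addresses with a regular expression instead of splitting on spaces, so lines with full names around the angle-bracketed emails parse correctly and non-matching lines are simply skipped. With a sample .mailmap line:

    import re

    line = 'Jane Doe <jane@example.com> Jane D <jdoe@old.example.com>'
    canonical_email, alias = re.match(
        r'[^#]*?(<.+>).*(<.+>).*', line).groups()
    # canonical_email == '<jane@example.com>'
    # alias == '<jdoe@old.example.com>'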
@@ -106,24 +115,18 @@ def parse_dependency_links(requirements_files=['requirements.txt',
    return dependency_links


def write_requirements():
    venv = os.environ.get('VIRTUAL_ENV', None)
    if venv is not None:
        with open("requirements.txt", "w") as req_file:
            output = subprocess.Popen(["pip", "-E", venv, "freeze", "-l"],
                                      stdout=subprocess.PIPE)
            requirements = output.communicate()[0].strip()
            req_file.write(requirements)


def _run_shell_command(cmd):
def _run_shell_command(cmd, throw_on_error=False):
    if os.name == 'nt':
        output = subprocess.Popen(["cmd.exe", "/C", cmd],
                                  stdout=subprocess.PIPE)
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
    else:
        output = subprocess.Popen(["/bin/sh", "-c", cmd],
                                  stdout=subprocess.PIPE)
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
    out = output.communicate()
    if output.returncode and throw_on_error:
        raise Exception("%s returned %d" % cmd, output.returncode)
    if len(out) == 0:
        return None
    if len(out[0].strip()) == 0:
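_run_shell_command now captures stderr alongside stdout and can raise on failure via throw_on_error. Note that, as synced, the raise line interpolates only cmd into a two-placeholder format string, so it would itself fail with a TypeError if ever reached. A simplified POSIX-only sketch (run_shell is my name) that passes both values:

    import subprocess

    def run_shell(cmd, throw_on_error=False):
        # Capture both streams so error output never leaks to the console.
        proc = subprocess.Popen(["/bin/sh", "-c", cmd],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        out, err = proc.communicate()
        if proc.returncode and throw_on_error:
            raise RuntimeError("%s returned %d" % (cmd, proc.returncode))
        return out.strip()

    print(run_shell("echo hello"))  # hello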
@@ -131,65 +134,26 @@ def _run_shell_command(cmd):
    return out[0].strip()


def _get_git_next_version_suffix(branch_name):
    datestamp = datetime.datetime.now().strftime('%Y%m%d')
    if branch_name == 'milestone-proposed':
        revno_prefix = "r"
    else:
        revno_prefix = ""
    _run_shell_command("git fetch origin +refs/meta/*:refs/remotes/meta/*")
    milestone_cmd = "git show meta/openstack/release:%s" % branch_name
    milestonever = _run_shell_command(milestone_cmd)
    if milestonever:
        first_half = "%s~%s" % (milestonever, datestamp)
    else:
        first_half = datestamp

    post_version = _get_git_post_version()
    # post version should look like:
    # 0.1.1.4.gcc9e28a
    # where the bit after the last . is the short sha, and the bit between
    # the last and second to last is the revno count
    (revno, sha) = post_version.split(".")[-2:]
    second_half = "%s%s.%s" % (revno_prefix, revno, sha)
    return ".".join((first_half, second_half))


def _get_git_current_tag():
    return _run_shell_command("git tag --contains HEAD")


def _get_git_tag_info():
    return _run_shell_command("git describe --tags")


def _get_git_post_version():
    current_tag = _get_git_current_tag()
    if current_tag is not None:
        return current_tag
    else:
        tag_info = _get_git_tag_info()
        if tag_info is None:
            base_version = "0.0"
            cmd = "git --no-pager log --oneline"
            out = _run_shell_command(cmd)
            revno = len(out.split("\n"))
            sha = _run_shell_command("git describe --always")
        else:
            tag_infos = tag_info.split("-")
            base_version = "-".join(tag_infos[:-2])
            (revno, sha) = tag_infos[-2:]
        return "%s.%s.%s" % (base_version, revno, sha)
def _get_git_directory():
    parent_dir = os.path.dirname(__file__)
    while True:
        git_dir = os.path.join(parent_dir, '.git')
        if os.path.exists(git_dir):
            return git_dir
        parent_dir, child = os.path.split(parent_dir)
        if not child:  # reached to root dir
            return None


def write_git_changelog():
    """Write a changelog based on the git changelog."""
    new_changelog = 'ChangeLog'
    git_dir = _get_git_directory()
    if not os.getenv('SKIP_WRITE_GIT_CHANGELOG'):
        if os.path.isdir('.git'):
            git_log_cmd = 'git log --stat'
        if git_dir:
            git_log_cmd = 'git --git-dir=%s log' % git_dir
            changelog = _run_shell_command(git_log_cmd)
            mailmap = parse_mailmap()
            mailmap = _parse_git_mailmap(git_dir)
            with open(new_changelog, "w") as changelog_file:
                changelog_file.write(canonicalize_emails(changelog, mailmap))
        else:
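The datestamp/milestone suffix machinery is dropped; instead _get_git_directory walks up from the module's location until it finds a .git entry, so every git command can be run with an explicit --git-dir. The walk on its own (find_git_directory is an illustrative name):

    import os

    def find_git_directory(start):
        parent_dir = os.path.abspath(start)
        while True:
            git_dir = os.path.join(parent_dir, '.git')
            if os.path.exists(git_dir):
                return git_dir
            parent_dir, child = os.path.split(parent_dir)
            if not child:  # reached the filesystem root
                return None

    print(find_git_directory(os.curdir))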
@@ -201,13 +165,23 @@ def generate_authors():
    jenkins_email = 'jenkins@review.(openstack|stackforge).org'
    old_authors = 'AUTHORS.in'
    new_authors = 'AUTHORS'
    git_dir = _get_git_directory()
    if not os.getenv('SKIP_GENERATE_AUTHORS'):
        if os.path.isdir('.git'):
        if git_dir:
            # don't include jenkins email address in AUTHORS file
            git_log_cmd = ("git log --format='%aN <%aE>' | sort -u | "
            git_log_cmd = ("git --git-dir=" + git_dir +
                           " log --format='%aN <%aE>' | sort -u | "
                           "egrep -v '" + jenkins_email + "'")
            changelog = _run_shell_command(git_log_cmd)
            mailmap = parse_mailmap()
            signed_cmd = ("git --git-dir=" + git_dir +
                          " log | grep -i Co-authored-by: | sort -u")
            signed_entries = _run_shell_command(signed_cmd)
            if signed_entries:
                new_entries = "\n".join(
                    [signed.split(":", 1)[1].strip()
                     for signed in signed_entries.split("\n") if signed])
                changelog = "\n".join((changelog, new_entries))
            mailmap = _parse_git_mailmap(git_dir)
            with open(new_authors, 'w') as new_authors_fh:
                new_authors_fh.write(canonicalize_emails(changelog, mailmap))
                if os.path.exists(old_authors):
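generate_authors now also folds Co-authored-by: trailers from the git log into the AUTHORS input. The string handling is plain Python; with sample entries:

    signed_entries = ("Co-authored-by: Alice Example <alice@example.com>\n"
                      "co-authored-by: Bob Example <bob@example.com>")
    new_entries = "\n".join(
        [signed.split(":", 1)[1].strip()
         for signed in signed_entries.split("\n") if signed])
    # new_entries ==
    # "Alice Example <alice@example.com>\nBob Example <bob@example.com>"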
@@ -227,26 +201,6 @@ _rst_template = """%(heading)s
"""


def read_versioninfo(project):
    """Read the versioninfo file. If it doesn't exist, we're in a github
    zipball, and there's really no way to know what version we really
    are, but that should be ok, because the utility of that should be
    just about nil if this code path is in use in the first place."""
    versioninfo_path = os.path.join(project, 'versioninfo')
    if os.path.exists(versioninfo_path):
        with open(versioninfo_path, 'r') as vinfo:
            version = vinfo.read().strip()
    else:
        version = "0.0.0"
    return version


def write_versioninfo(project, version):
    """Write a simple file containing the version of the package."""
    with open(os.path.join(project, 'versioninfo'), 'w') as fil:
        fil.write("%s\n" % version)


def get_cmdclass():
    """Return dict of commands to run from setup.py."""
@@ -276,8 +230,11 @@ def get_cmdclass():
        from sphinx.setup_command import BuildDoc

        class LocalBuildDoc(BuildDoc):

            builders = ['html', 'man']

            def generate_autoindex(self):
                print "**Autodocumenting from %s" % os.path.abspath(os.curdir)
                print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
                modules = {}
                option_dict = self.distribution.get_option_dict('build_sphinx')
                source_dir = os.path.join(option_dict['source_dir'][1], 'api')

@@ -302,7 +259,7 @@ def get_cmdclass():
                    values = dict(module=module, heading=heading,
                                  underline=underline)

                    print "Generating %s" % output_filename
                    print("Generating %s" % output_filename)
                    with open(output_filename, 'w') as output_file:
                        output_file.write(_rst_template % values)
                    autoindex.write(" %s.rst\n" % module)
@@ -311,56 +268,102 @@ def get_cmdclass():
                if not os.getenv('SPHINX_DEBUG'):
                    self.generate_autoindex()

                for builder in ['html', 'man']:
                for builder in self.builders:
                    self.builder = builder
                    self.finalize_options()
                    self.project = self.distribution.get_name()
                    self.version = self.distribution.get_version()
                    self.release = self.distribution.get_version()
                    BuildDoc.run(self)

        class LocalBuildLatex(LocalBuildDoc):
            builders = ['latex']

        cmdclass['build_sphinx'] = LocalBuildDoc
        cmdclass['build_sphinx_latex'] = LocalBuildLatex
    except ImportError:
        pass

    return cmdclass


def get_git_branchname():
    for branch in _run_shell_command("git branch --color=never").split("\n"):
        if branch.startswith('*'):
            _branch_name = branch.split()[1].strip()
    if _branch_name == "(no":
        _branch_name = "no-branch"
    return _branch_name
def _get_revno(git_dir):
    """Return the number of commits since the most recent tag.

    We use git-describe to find this out, but if there are no
    tags then we fall back to counting commits since the beginning
    of time.
    """
    describe = _run_shell_command(
        "git --git-dir=%s describe --always" % git_dir)
    if "-" in describe:
        return describe.rsplit("-", 2)[-2]

    # no tags found
    revlist = _run_shell_command(
        "git --git-dir=%s rev-list --abbrev-commit HEAD" % git_dir)
    return len(revlist.splitlines())


def get_pre_version(projectname, base_version):
    """Return a version which is leading up to a version that will
    be released in the future."""
    if os.path.isdir('.git'):
        current_tag = _get_git_current_tag()
        if current_tag is not None:
            version = current_tag
        else:
            branch_name = os.getenv('BRANCHNAME',
                                    os.getenv('GERRIT_REFNAME',
                                              get_git_branchname()))
            version_suffix = _get_git_next_version_suffix(branch_name)
            version = "%s~%s" % (base_version, version_suffix)
        write_versioninfo(projectname, version)
        return version
    else:
        version = read_versioninfo(projectname)
    return version


def get_post_version(projectname):
def _get_version_from_git(pre_version):
    """Return a version which is equal to the tag that's on the current
    revision if there is one, or tag plus number of additional revisions
    if the current revision has no tag."""

    if os.path.isdir('.git'):
        version = _get_git_post_version()
        write_versioninfo(projectname, version)
    git_dir = _get_git_directory()
    if git_dir:
        if pre_version:
            try:
                return _run_shell_command(
                    "git --git-dir=" + git_dir + " describe --exact-match",
                    throw_on_error=True).replace('-', '.')
            except Exception:
                sha = _run_shell_command(
                    "git --git-dir=" + git_dir + " log -n1 --pretty=format:%h")
                return "%s.a%s.g%s" % (pre_version, _get_revno(git_dir), sha)
        else:
            return _run_shell_command(
                "git --git-dir=" + git_dir + " describe --always").replace(
                    '-', '.')
    return None


def _get_version_from_pkg_info(package_name):
    """Get the version from PKG-INFO file if we can."""
    try:
        pkg_info_file = open('PKG-INFO', 'r')
    except (IOError, OSError):
        return None
    try:
        pkg_info = email.message_from_file(pkg_info_file)
    except email.MessageError:
        return None
    # Check to make sure we're in our own dir
    if pkg_info.get('Name', None) != package_name:
        return None
    return pkg_info.get('Version', None)


def get_version(package_name, pre_version=None):
    """Get the version of the project. First, try getting it from PKG-INFO, if
    it exists. If it does, that means we're in a distribution tarball or that
    install has happened. Otherwise, if there is no PKG-INFO file, pull the
    version from git.

    We do not support setup.py version sanity in git archive tarballs, nor do
    we support packagers directly sucking our git repo into theirs. We expect
    that a source tarball be made from our git repo - or that if someone wants
    to make a source tarball from a fork of our repo with additional tags in it
    that they understand and desire the results of doing that.
    """
    version = os.environ.get("OSLO_PACKAGE_VERSION", None)
    if version:
        return version
    return read_versioninfo(projectname)
    version = _get_version_from_pkg_info(package_name)
    if version:
        return version
    version = _get_version_from_git(pre_version)
    if version:
        return version
    raise Exception("Versioning for this project requires either an sdist"
                    " tarball, or access to an upstream git repository.")
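This hunk is the "changes the way versioning is determined" part of the commit message: the versioninfo file and the pre/post version helpers are replaced by get_version, which checks an environment override, then PKG-INFO, then git. A rough standalone sketch of that resolution order, with the git step reduced to a plain git describe (resolve_version is my name, not the synced API):

    import os
    import subprocess

    def resolve_version(package_name):
        # 1. An explicit override from the environment wins outright.
        version = os.environ.get("OSLO_PACKAGE_VERSION")
        if version:
            return version
        # 2. Inside an sdist/install, PKG-INFO would carry the version
        #    (handled by _get_version_from_pkg_info in the synced code).
        # 3. Otherwise fall back to the git history.
        try:
            out = subprocess.check_output(["git", "describe", "--always"])
            return out.decode("utf-8").strip().replace("-", ".")
        except (OSError, subprocess.CalledProcessError):
            raise Exception("Versioning requires an sdist tarball or a git "
                            "repository.")

    # e.g. a repo at 5 commits past tag 0.2.3 yields something like
    # '0.2.3.5.gabc1234' after the dash-to-dot replacement.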
@@ -1,6 +1,6 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may

@@ -25,18 +25,22 @@ import datetime
import iso8601


TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
PERFECT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
# ISO 8601 extended time format with microseconds
_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND


def isotime(at=None):
def isotime(at=None, subsecond=False):
    """Stringify time in ISO 8601 format"""
    if not at:
        at = utcnow()
    str = at.strftime(TIME_FORMAT)
    st = at.strftime(_ISO8601_TIME_FORMAT
                     if not subsecond
                     else _ISO8601_TIME_FORMAT_SUBSECOND)
    tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
    str += ('Z' if tz == 'UTC' else tz)
    return str
    st += ('Z' if tz == 'UTC' else tz)
    return st


def parse_isotime(timestr):
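The reworked isotime keeps second-resolution output by default and emits microseconds only when subsecond=True; the rewrite also stops shadowing the built-in name str. For a fixed timestamp the two formats look like:

    import datetime

    _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
    _ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'

    at = datetime.datetime(2013, 3, 1, 12, 30, 45, 123456)
    print(at.strftime(_ISO8601_TIME_FORMAT) + 'Z')
    # 2013-03-01T12:30:45Z
    print(at.strftime(_ISO8601_TIME_FORMAT_SUBSECOND) + 'Z')
    # 2013-03-01T12:30:45.123456Z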
setup.py

@@ -14,7 +14,7 @@ tests_require = setup.parse_requirements(['tools/test-requires'])

setuptools.setup(
    name="python-keystoneclient",
    version=setup.get_post_version('keystoneclient'),
    version=setup.get_version('python-keystoneclient'),
    description="Client library for OpenStack Identity API (Keystone)",
    long_description=read('README.rst'),
    url='https://github.com/openstack/python-keystoneclient',
@@ -42,5 +42,4 @@ setuptools.setup(
    entry_points={
        'console_scripts': ['keystone = keystoneclient.shell:main']
    },
    data_files=[('keystoneclient', ['keystoneclient/versioninfo'])],
)