Retire repo
This repo was created by accident; use deb-python-oslo.serialization instead.

Needed-By: I1ac1a06931c8b6dd7c2e73620a0302c29e605f03
Change-Id: I81894aea69b9d09b0977039623c26781093a397a
parent 1f86a66a36
commit ab82bf23bf
@@ -1,8 +0,0 @@
[run]
branch = True
source = oslo_serialization
omit = oslo_serialization/tests/*

[report]
ignore_errors = True
precision = 2
.gitignore (vendored, 52 lines)
@@ -1,52 +0,0 @@
*.py[cod]

# C extensions
*.so

# Packages
*.egg
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64

# Installer logs
pip-log.txt

# Unit test / coverage reports
.coverage
cover
.tox
nosetests.xml
.testrepository

# Translations
*.mo

# Mr Developer
.mr.developer.cfg
.project
.pydevproject

# Complexity
output/*.html
output/*/index.html

# Sphinx
doc/build

# pbr generates these
AUTHORS
ChangeLog

# Editors
*~
.*.swp
@@ -1,4 +0,0 @@
[gerrit]
host=review.openstack.org
port=29418
project=openstack/oslo.serialization.git
.mailmap (3 lines)
@@ -1,3 +0,0 @@
# Format is:
# <preferred e-mail> <other e-mail 1>
# <preferred e-mail> <other e-mail 2>
@@ -1,7 +0,0 @@
[DEFAULT]
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
             OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
             OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
             ${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list
@@ -1,16 +0,0 @@
If you would like to contribute to the development of OpenStack,
you must follow the steps in this page:

   http://docs.openstack.org/infra/manual/developers.html

Once those steps have been completed, changes to OpenStack
should be submitted for review via the Gerrit tool, following
the workflow documented at:

   http://docs.openstack.org/infra/manual/developers.html#development-workflow

Pull requests submitted through GitHub will be ignored.

Bugs should be filed on Launchpad, not GitHub:

   https://bugs.launchpad.net/oslo.serialization
@@ -1,4 +0,0 @@
oslo.serialization Style Commandments
======================================================

Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/
LICENSE (175 lines)
@@ -1,175 +0,0 @@

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.
README.rst (19 lines)
@@ -1,19 +0,0 @@
====================
oslo.serialization
====================

.. image:: https://img.shields.io/pypi/v/oslo.serialization.svg
    :target: https://pypi.python.org/pypi/oslo.serialization/
    :alt: Latest Version

.. image:: https://img.shields.io/pypi/dm/oslo.serialization.svg
    :target: https://pypi.python.org/pypi/oslo.serialization/
    :alt: Downloads

The oslo.serialization library provides support for representing objects
in transmittable and storable formats, such as Base64, JSON and MessagePack.

* Free software: Apache license
* Documentation: http://docs.openstack.org/developer/oslo.serialization
* Source: http://git.openstack.org/cgit/openstack/oslo.serialization
* Bugs: http://bugs.launchpad.net/oslo.serialization
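A minimal usage sketch of the three formats the README names (editor's illustration, not part of the deleted file; assumes oslo.serialization and the msgpack library are installed):

    from oslo_serialization import base64, jsonutils, msgpackutils

    data = {'name': u'caf\xe9', 'count': 3}

    text = base64.encode_as_text(b'raw bytes')   # Base64 as a text string
    blob = jsonutils.dump_as_bytes(data)         # JSON as bytes
    assert jsonutils.loads(blob) == data         # round-trip through JSON

    packed = msgpackutils.dumps(data)            # MessagePack bytes
    assert msgpackutils.loads(packed) == data    # round-trip through msgpack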
README.txt (new file, 13 lines)
@@ -0,0 +1,13 @@
This project is no longer maintained.

The contents of this repository are still available in the Git
source code management system. To see the contents of this
repository before it reached its end of life, please check out the
previous commit with "git checkout HEAD^1".

Instead, use the deb-python-oslo.serialization project at
http://git.openstack.org/cgit/openstack/deb-python-oslo.serialization .

For any further questions, please email
openstack-dev@lists.openstack.org or join #openstack-dev on
Freenode.
@@ -1,17 +0,0 @@
base64
======

.. automodule:: oslo_serialization.base64
   :members:

jsonutils
=========

.. automodule:: oslo_serialization.jsonutils
   :members:

msgpackutils
============

.. automodule:: oslo_serialization.msgpackutils
   :members:
@@ -1,75 +0,0 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys

sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc',
    #'sphinx.ext.intersphinx',
    'oslosphinx'
]

# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'oslo.serialization'
copyright = u'2014, OpenStack Foundation'

# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output --------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']

# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index',
     '%s.tex' % project,
     u'%s Documentation' % project,
     u'OpenStack Foundation', 'manual'),
]

# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}
@@ -1,5 +0,0 @@
==============
 Contributing
==============

.. include:: ../../CONTRIBUTING.rst
@@ -1 +0,0 @@
.. include:: ../../ChangeLog
@@ -1,37 +0,0 @@
==============================================
Welcome to oslo.serialization's documentation!
==============================================

The `oslo`_ serialization library provides support for representing objects
in transmittable and storable formats, such as JSON and MessagePack.

.. toctree::
   :maxdepth: 1

   installation
   contributing

API Documentation
=================

.. toctree::
   :maxdepth: 2

   api

Release Notes
=============

.. toctree::
   :maxdepth: 1

   history

Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

.. _oslo: https://wiki.openstack.org/wiki/Oslo
@@ -1,12 +0,0 @@
============
Installation
============

At the command line::

    $ pip install oslo.serialization

Or, if you have virtualenvwrapper installed::

    $ mkvirtualenv oslo.serialization
    $ pip install oslo.serialization
@@ -1,85 +0,0 @@
# Copyright 2015 Red Hat
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Utilities to encode and decode Base64.

.. versionadded:: 1.10
"""

from __future__ import absolute_import

import base64

import six


def encode_as_bytes(s, encoding='utf-8'):
    """Encode a string using Base64.

    If *s* is a text string, first encode it to *encoding* (UTF-8 by default).

    :param s: bytes or text string to be encoded
    :param encoding: encoding used to encode *s* if it's a text string
    :returns: Base64 encoded byte string (bytes)

    Use encode_as_text() to get the Base64 encoded string as text.
    """
    if isinstance(s, six.text_type):
        s = s.encode(encoding)
    return base64.b64encode(s)


def encode_as_text(s, encoding='utf-8'):
    """Encode a string using Base64.

    If *s* is a text string, first encode it to *encoding* (UTF-8 by default).

    :param s: bytes or text string to be encoded
    :param encoding: encoding used to encode *s* if it's a text string
    :returns: Base64 encoded text string (Unicode)

    Use encode_as_bytes() to get the Base64 encoded string as bytes.
    """
    encoded = encode_as_bytes(s, encoding=encoding)
    return encoded.decode('ascii')


def decode_as_bytes(encoded):
    """Decode a Base64 encoded string.

    :param encoded: bytes or text Base64 encoded string to be decoded
    :returns: decoded bytes string (bytes)

    Use decode_as_text() to get the decoded string as text.
    """
    if isinstance(encoded, bytes):
        encoded = encoded.decode('ascii')
    return base64.b64decode(encoded)


def decode_as_text(encoded, encoding='utf-8'):
    """Decode a Base64 encoded string.

    Decode the Base64 string and then decode the result from *encoding*
    (UTF-8 by default).

    :param encoded: bytes or text Base64 encoded string to be decoded
    :returns: decoded text string (Unicode)

    Use decode_as_bytes() to get the decoded string as bytes.
    """
    decoded = decode_as_bytes(encoded)
    return decoded.decode(encoding)
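For reference, a short sketch of how the helpers above are used; the expected values mirror the unit tests later in this diff:

    from oslo_serialization import base64

    # Text and bytes inputs are both accepted; output type depends on the helper.
    assert base64.encode_as_bytes(u'text') == b'dGV4dA=='
    assert base64.encode_as_text(b'text') == u'dGV4dA=='
    assert base64.decode_as_bytes(u'dGV4dA==') == b'text'
    assert base64.decode_as_text(b'dGV4dA==') == u'text'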
@@ -1,263 +0,0 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

'''
JSON related utilities.

This module provides a few things:

#. A handy function for getting an object down to something that can be
   JSON serialized. See :func:`.to_primitive`.
#. Wrappers around :func:`.loads` and :func:`.dumps`. The :func:`.dumps`
   wrapper will automatically use :func:`.to_primitive` for you if needed.
#. This sets up ``anyjson`` to use the :func:`.loads` and :func:`.dumps`
   wrappers if ``anyjson`` is available.
'''


import codecs
import datetime
import functools
import inspect
import itertools
import json
import uuid

from oslo_utils import encodeutils
from oslo_utils import importutils
from oslo_utils import timeutils
import six
import six.moves.xmlrpc_client as xmlrpclib

ipaddress = importutils.try_import("ipaddress")
netaddr = importutils.try_import("netaddr")

_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
                     inspect.isfunction, inspect.isgeneratorfunction,
                     inspect.isgenerator, inspect.istraceback, inspect.isframe,
                     inspect.iscode, inspect.isbuiltin, inspect.isroutine,
                     inspect.isabstract]

_simple_types = ((six.text_type,) + six.integer_types
                 + (type(None), bool, float))


def to_primitive(value, convert_instances=False, convert_datetime=True,
                 level=0, max_depth=3, encoding='utf-8'):
    """Convert a complex object into primitives.

    Handy for JSON serialization. We can optionally handle instances,
    but since this is a recursive function, we could have cyclical
    data structures.

    To handle cyclical data structures we could track the actual objects
    visited in a set, but not all objects are hashable. Instead we just
    track the depth of the object inspections and don't go too deep.

    Therefore, ``convert_instances=True`` is lossy ... be aware.

    .. versionchanged:: 1.3
       Support UUID encoding.

    .. versionchanged:: 1.6
       Dictionary keys are now also encoded.
    """
    # handle obvious types first - order of basic types determined by running
    # full tests on nova project, resulting in the following counts:
    # 572754 <type 'NoneType'>
    # 460353 <type 'int'>
    # 379632 <type 'unicode'>
    # 274610 <type 'str'>
    # 199918 <type 'dict'>
    # 114200 <type 'datetime.datetime'>
    #  51817 <type 'bool'>
    #  26164 <type 'list'>
    #   6491 <type 'float'>
    #    283 <type 'tuple'>
    #     19 <type 'long'>
    if isinstance(value, _simple_types):
        return value

    if isinstance(value, six.binary_type):
        if six.PY3:
            value = value.decode(encoding=encoding)
        return value

    # It's not clear why xmlrpclib created their own DateTime type, but
    # for our purposes, make it a datetime type which is explicitly
    # handled
    if isinstance(value, xmlrpclib.DateTime):
        value = datetime.datetime(*tuple(value.timetuple())[:6])

    if isinstance(value, datetime.datetime):
        if convert_datetime:
            return value.strftime(timeutils.PERFECT_TIME_FORMAT)
        else:
            return value

    if isinstance(value, uuid.UUID):
        return six.text_type(value)

    if netaddr and isinstance(value, netaddr.IPAddress):
        return six.text_type(value)

    if ipaddress and isinstance(value,
                                (ipaddress.IPv4Address,
                                 ipaddress.IPv6Address)):
        return six.text_type(value)

    # value of itertools.count doesn't get caught by nasty_type_tests
    # and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return six.text_type(value)

    if any(test(value) for test in _nasty_type_tests):
        return six.text_type(value)

    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
    #              has a @wrap_exception with a notifier will fail. If
    #              we up the dependency to 0.5.4 (when it is released) we
    #              can remove this workaround.
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'

    if level > max_depth:
        return None

    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        recursive = functools.partial(to_primitive,
                                      convert_instances=convert_instances,
                                      convert_datetime=convert_datetime,
                                      level=level,
                                      max_depth=max_depth,
                                      encoding=encoding)
        if isinstance(value, dict):
            return {recursive(k): recursive(v)
                    for k, v in six.iteritems(value)}
        elif hasattr(value, 'iteritems'):
            return recursive(dict(value.iteritems()), level=level + 1)
        # Python 3 does not have iteritems
        elif hasattr(value, 'items'):
            return recursive(dict(value.items()), level=level + 1)
        elif hasattr(value, '__iter__'):
            return list(map(recursive, value))
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return recursive(value.__dict__, level=level + 1)
    except TypeError:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return six.text_type(value)

    return value


JSONEncoder = json.JSONEncoder
JSONDecoder = json.JSONDecoder


def dumps(obj, default=to_primitive, **kwargs):
    """Serialize ``obj`` to a JSON formatted ``str``.

    :param obj: object to be serialized
    :param default: function that returns a serializable version of an object,
                    :func:`to_primitive` is used by default.
    :param kwargs: extra named parameters, please see documentation \
    of `json.dumps <https://docs.python.org/2/library/json.html#basic-usage>`_
    :returns: json formatted string

    Use dump_as_bytes() to ensure that the result type is ``bytes`` on Python 2
    and Python 3.
    """
    return json.dumps(obj, default=default, **kwargs)


def dump_as_bytes(obj, default=to_primitive, encoding='utf-8', **kwargs):
    """Serialize ``obj`` to a JSON formatted ``bytes``.

    :param obj: object to be serialized
    :param default: function that returns a serializable version of an object,
                    :func:`to_primitive` is used by default.
    :param encoding: encoding used to encode the serialized JSON output
    :param kwargs: extra named parameters, please see documentation \
    of `json.dumps <https://docs.python.org/2/library/json.html#basic-usage>`_
    :returns: json formatted string

    .. versionadded:: 1.10
    """
    serialized = dumps(obj, default=default, **kwargs)
    if isinstance(serialized, six.text_type):
        # On Python 3, json.dumps() returns Unicode
        serialized = serialized.encode(encoding)
    return serialized


def dump(obj, fp, *args, **kwargs):
    """Serialize ``obj`` as a JSON formatted stream to ``fp``

    :param obj: object to be serialized
    :param fp: a ``.write()``-supporting file-like object
    :param default: function that returns a serializable version of an object,
                    :func:`to_primitive` is used by default.
    :param args: extra arguments, please see documentation \
    of `json.dump <https://docs.python.org/2/library/json.html#basic-usage>`_
    :param kwargs: extra named parameters, please see documentation \
    of `json.dump <https://docs.python.org/2/library/json.html#basic-usage>`_

    .. versionchanged:: 1.3
       The *default* parameter now uses :func:`to_primitive` by default.
    """
    default = kwargs.get('default', to_primitive)
    return json.dump(obj, fp, default=default, *args, **kwargs)


def loads(s, encoding='utf-8', **kwargs):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
    document) to a Python object.

    :param s: string to deserialize
    :param encoding: encoding used to interpret the string
    :param kwargs: extra named parameters, please see documentation \
    of `json.loads <https://docs.python.org/2/library/json.html#basic-usage>`_
    :returns: python object
    """
    return json.loads(encodeutils.safe_decode(s, encoding), **kwargs)


def load(fp, encoding='utf-8', **kwargs):
    """Deserialize ``fp`` to a Python object.

    :param fp: a ``.read()`` -supporting file-like object
    :param encoding: encoding used to interpret the string
    :param kwargs: extra named parameters, please see documentation \
    of `json.loads <https://docs.python.org/2/library/json.html#basic-usage>`_
    :returns: python object
    """
    return json.load(codecs.getreader(encoding)(fp), **kwargs)


try:
    import anyjson
except ImportError:
    pass
else:
    anyjson._modules.append((__name__, 'dumps', TypeError,
                             'loads', ValueError, 'load'))
    anyjson.force_implementation(__name__)
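A small sketch of the wrappers above (editor's illustration): dumps() falls back to to_primitive() for values the stdlib json module cannot handle on its own, such as datetimes, so the example below is expected to serialize cleanly.

    import datetime

    from oslo_serialization import jsonutils

    doc = {'when': datetime.datetime(2016, 1, 1, 12, 0, 0), 'id': 42}
    text = jsonutils.dumps(doc)            # datetime rendered via to_primitive()
    blob = jsonutils.dump_as_bytes(doc)    # same content, always bytes
    assert jsonutils.loads(text)['id'] == 42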
@@ -1,487 +0,0 @@
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

'''
MessagePack related utilities.

This module provides a few things:

#. A handy registry for getting an object down to something that can be
   msgpack serialized. See :class:`.HandlerRegistry`.
#. Wrappers around :func:`.loads` and :func:`.dumps`. The :func:`.dumps`
   wrapper will automatically use
   the :py:attr:`~oslo_serialization.msgpackutils.default_registry` for
   you if needed.

.. versionadded:: 1.3
'''


import datetime
import functools
import itertools
import uuid

import msgpack
from oslo_utils import importutils
from pytz import timezone
import six
import six.moves.xmlrpc_client as xmlrpclib

netaddr = importutils.try_import("netaddr")


class Interval(object):
    """Small and/or simple immutable integer/float interval class.

    Interval checking is **inclusive** of the min/max boundaries.
    """

    def __init__(self, min_value, max_value):
        if min_value > max_value:
            raise ValueError("Minimum value %s must be less than"
                             " or equal to maximum value %s" % (min_value,
                                                                max_value))
        self._min_value = min_value
        self._max_value = max_value

    @property
    def min_value(self):
        return self._min_value

    @property
    def max_value(self):
        return self._max_value

    def __contains__(self, value):
        return value >= self.min_value and value <= self.max_value

    def __repr__(self):
        return 'Interval(%s, %s)' % (self._min_value, self._max_value)


# Expose these so that users don't have to import msgpack to gain these.

PackException = msgpack.PackException
UnpackException = msgpack.UnpackException


class HandlerRegistry(object):
    """Registry of *type* specific msgpack handlers extensions.

    See: https://github.com/msgpack/msgpack/blob/master/spec.md#formats-ext

    Do note that due to the current limitations in the msgpack python
    library we can not *currently* dump/load a tuple without converting
    it to a list.

    This may be fixed in: https://github.com/msgpack/msgpack-python/pull/100

    .. versionadded:: 1.5
    """

    reserved_extension_range = Interval(0, 32)
    """
    These ranges are **always** reserved for use by ``oslo.serialization`` and
    its own add-ons extensions (these extensions are meant to be generally
    applicable to all of python).
    """

    non_reserved_extension_range = Interval(33, 127)
    """
    These ranges are **always** reserved for use by applications building
    their own type specific handlers (the meaning of extensions in this range
    will typically vary depending on application).
    """

    min_value = 0
    """
    Applications can assign 0 to 127 to store application (or library)
    specific type handlers; see above ranges for what is reserved by this
    library and what is not.
    """

    max_value = 127
    """
    Applications can assign 0 to 127 to store application (or library)
    specific type handlers; see above ranges for what is reserved by this
    library and what is not.
    """

    def __init__(self):
        self._handlers = {}
        self._num_handlers = 0
        self.frozen = False

    def __iter__(self):
        """Iterates over **all** registered handlers."""
        for handlers in six.itervalues(self._handlers):
            for h in handlers:
                yield h

    def register(self, handler, reserved=False, override=False):
        """Register an extension handler to handle its associated type."""
        if self.frozen:
            raise ValueError("Frozen handler registry can't be modified")
        if reserved:
            ok_interval = self.reserved_extension_range
        else:
            ok_interval = self.non_reserved_extension_range
        ident = handler.identity
        if ident < ok_interval.min_value:
            raise ValueError("Handler '%s' identity must be greater"
                             " or equal to %s" % (handler,
                                                  ok_interval.min_value))
        if ident > ok_interval.max_value:
            raise ValueError("Handler '%s' identity must be less than"
                             " or equal to %s" % (handler,
                                                  ok_interval.max_value))
        if ident in self._handlers and override:
            existing_handlers = self._handlers[ident]
            # Insert at the front so that overrides get selected before
            # whatever existed before the override...
            existing_handlers.insert(0, handler)
            self._num_handlers += 1
        elif ident in self._handlers and not override:
            raise ValueError("Already registered handler(s) with"
                             " identity %s: %s" % (ident,
                                                   self._handlers[ident]))
        else:
            self._handlers[ident] = [handler]
            self._num_handlers += 1

    def __len__(self):
        """Return how many extension handlers are registered."""
        return self._num_handlers

    def __contains__(self, identity):
        """Return if any handler exists for the given identity (number)."""
        return identity in self._handlers

    def copy(self, unfreeze=False):
        """Deep copy the given registry (and its handlers)."""
        c = type(self)()
        for ident, handlers in six.iteritems(self._handlers):
            cloned_handlers = []
            for h in handlers:
                if hasattr(h, 'copy'):
                    h = h.copy(c)
                cloned_handlers.append(h)
            c._handlers[ident] = cloned_handlers
            c._num_handlers += len(cloned_handlers)
        if not unfreeze and self.frozen:
            c.frozen = True
        return c

    def get(self, identity):
        """Get the handler for the given numeric identity (or none)."""
        maybe_handlers = self._handlers.get(identity)
        if maybe_handlers:
            # Prefer the first (if there are many) as this is how we
            # override built-in extensions (for those that wish to do this).
            return maybe_handlers[0]
        else:
            return None

    def match(self, obj):
        """Match the registry's handlers to the given object (or none)."""
        for possible_handlers in six.itervalues(self._handlers):
            for h in possible_handlers:
                if isinstance(obj, h.handles):
                    return h
        return None


class UUIDHandler(object):
    identity = 0
    handles = (uuid.UUID,)

    @staticmethod
    def serialize(obj):
        return six.text_type(obj.hex).encode('ascii')

    @staticmethod
    def deserialize(data):
        return uuid.UUID(hex=six.text_type(data, encoding='ascii'))


class DateTimeHandler(object):
    identity = 1
    handles = (datetime.datetime,)

    def __init__(self, registry):
        self._registry = registry

    def copy(self, registry):
        return type(self)(registry)

    def serialize(self, dt):
        dct = {
            u'day': dt.day,
            u'month': dt.month,
            u'year': dt.year,
            u'hour': dt.hour,
            u'minute': dt.minute,
            u'second': dt.second,
            u'microsecond': dt.microsecond,
        }
        if dt.tzinfo:
            tz = dt.tzinfo.tzname(None)
            if six.PY2:
                tz = tz.decode("ascii")
            dct[u'tz'] = tz
        return dumps(dct, registry=self._registry)

    def deserialize(self, blob):
        dct = loads(blob, registry=self._registry)

        if six.PY3 and b"day" in dct:
            # NOTE(sileht): oslo.serialization <= 2.4.1 was storing things
            # as unicode for py3 while it was bytes for py2.
            # For python2 we don't care: bytes or unicode both work
            # for dict keys and tz.
            # But for python3 we have some backward compatibility
            # to take care of, in case the payload was produced
            # by python2 and is now read by python3.
            dct = dict((k.decode("ascii"), v) for k, v in dct.items())
            if 'tz' in dct:
                dct['tz'] = dct['tz'].decode("ascii")

        dt = datetime.datetime(day=dct['day'],
                               month=dct['month'],
                               year=dct['year'],
                               hour=dct['hour'],
                               minute=dct['minute'],
                               second=dct['second'],
                               microsecond=dct['microsecond'])
        if 'tz' in dct:
            tzinfo = timezone(dct['tz'])
            dt = tzinfo.localize(dt)
        return dt


class CountHandler(object):
    identity = 2
    handles = (itertools.count,)

    @staticmethod
    def serialize(obj):
        # FIXME(harlowja): figure out a better way to avoid hacking into
        # the string representation of count to get at the right numbers...
        obj = six.text_type(obj)
        start = obj.find("(") + 1
        end = obj.rfind(")")
        pieces = obj[start:end].split(",")
        if len(pieces) == 1:
            start = int(pieces[0])
            step = 1
        else:
            start = int(pieces[0])
            step = int(pieces[1])
        return msgpack.packb([start, step])

    @staticmethod
    def deserialize(data):
        value = msgpack.unpackb(data)
        start, step = value
        return itertools.count(start, step)


if netaddr is not None:
    class NetAddrIPHandler(object):
        identity = 3
        handles = (netaddr.IPAddress,)

        @staticmethod
        def serialize(obj):
            return msgpack.packb(obj.value)

        @staticmethod
        def deserialize(data):
            return netaddr.IPAddress(msgpack.unpackb(data))
else:
    NetAddrIPHandler = None


class SetHandler(object):
    identity = 4
    handles = (set,)

    def __init__(self, registry):
        self._registry = registry

    def copy(self, registry):
        return type(self)(registry)

    def serialize(self, obj):
        return dumps(list(obj), registry=self._registry)

    def deserialize(self, data):
        return self.handles[0](loads(data, registry=self._registry))


class FrozenSetHandler(SetHandler):
    identity = 5
    handles = (frozenset,)


class XMLRPCDateTimeHandler(object):
    handles = (xmlrpclib.DateTime,)
    identity = 6

    def __init__(self, registry):
        self._handler = DateTimeHandler(registry)

    def copy(self, registry):
        return type(self)(registry)

    def serialize(self, obj):
        dt = datetime.datetime(*tuple(obj.timetuple())[:6])
        return self._handler.serialize(dt)

    def deserialize(self, blob):
        dt = self._handler.deserialize(blob)
        return xmlrpclib.DateTime(dt.timetuple())


class DateHandler(object):
    identity = 7
    handles = (datetime.date,)

    def __init__(self, registry):
        self._registry = registry

    def copy(self, registry):
        return type(self)(registry)

    def serialize(self, d):
        dct = {
            u'year': d.year,
            u'month': d.month,
            u'day': d.day,
        }
        return dumps(dct, registry=self._registry)

    def deserialize(self, blob):
        dct = loads(blob, registry=self._registry)
        if six.PY3 and b"day" in dct:
            # NOTE(sileht): see DateTimeHandler.deserialize()
            dct = dict((k.decode("ascii"), v) for k, v in dct.items())

        return datetime.date(year=dct['year'],
                             month=dct['month'],
                             day=dct['day'])


def _serializer(registry, obj):
    handler = registry.match(obj)
    if handler is None:
        raise ValueError("No serialization handler registered"
                         " for type '%s'" % (type(obj).__name__))
    return msgpack.ExtType(handler.identity, handler.serialize(obj))


def _unserializer(registry, code, data):
    handler = registry.get(code)
    if not handler:
        return msgpack.ExtType(code, data)
    else:
        return handler.deserialize(data)


def _create_default_registry():
    registry = HandlerRegistry()
    registry.register(DateTimeHandler(registry), reserved=True)
    registry.register(DateHandler(registry), reserved=True)
    registry.register(UUIDHandler(), reserved=True)
    registry.register(CountHandler(), reserved=True)
    registry.register(SetHandler(registry), reserved=True)
    registry.register(FrozenSetHandler(registry), reserved=True)
    if netaddr is not None:
        registry.register(NetAddrIPHandler(), reserved=True)
    registry.register(XMLRPCDateTimeHandler(registry), reserved=True)
    registry.frozen = True
    return registry


default_registry = _create_default_registry()
"""
Default, read-only/frozen registry that will be used when none is provided.

This registry has msgpack extensions for the following:

* ``DateTime`` objects.
* ``Date`` objects.
* ``UUID`` objects.
* ``itertools.count`` objects/iterators.
* ``set`` and ``frozenset`` container(s).
* ``netaddr.IPAddress`` objects (only if ``netaddr`` is importable).
* ``xmlrpclib.DateTime`` datetime objects.

.. versionadded:: 1.5
"""


def load(fp, registry=None):
    """Deserialize ``fp`` into a Python object.

    .. versionchanged:: 1.5
       Added *registry* parameter.
    """
    if registry is None:
        registry = default_registry
    # NOTE(harlowja): the reason we can't use the more native msgpack functions
    # here is that the unpack() function (oddly) doesn't seem to take a
    # 'ext_hook' parameter..
    ext_hook = functools.partial(_unserializer, registry)
    return msgpack.Unpacker(fp, ext_hook=ext_hook, encoding='utf-8').unpack()


def dump(obj, fp, registry=None):
    """Serialize ``obj`` as a messagepack formatted stream to ``fp``.

    .. versionchanged:: 1.5
       Added *registry* parameter.
    """
    if registry is None:
        registry = default_registry
    return msgpack.pack(obj, fp,
                        default=functools.partial(_serializer, registry),
                        use_bin_type=True)


def dumps(obj, registry=None):
    """Serialize ``obj`` to a messagepack formatted ``str``.

    .. versionchanged:: 1.5
       Added *registry* parameter.
    """
    if registry is None:
        registry = default_registry
    return msgpack.packb(obj,
                         default=functools.partial(_serializer, registry),
                         use_bin_type=True)


def loads(s, registry=None):
    """Deserialize ``s`` messagepack ``str`` into a Python object.

    .. versionchanged:: 1.5
       Added *registry* parameter.
    """
    if registry is None:
        registry = default_registry
    ext_hook = functools.partial(_unserializer, registry)
    return msgpack.unpackb(s, ext_hook=ext_hook, encoding='utf-8')
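A hedged sketch of extending the registry shown above: the Point class, PointHandler and identity 33 are illustrative only (33 is simply the first non-reserved extension id); the default registry is frozen, so the sketch works on an unfrozen copy.

    import msgpack

    from oslo_serialization import msgpackutils


    class Point(object):
        def __init__(self, x, y):
            self.x = x
            self.y = y


    class PointHandler(object):
        identity = 33            # must fall in the non-reserved range 33-127
        handles = (Point,)

        @staticmethod
        def serialize(obj):
            return msgpack.packb([obj.x, obj.y])

        @staticmethod
        def deserialize(data):
            x, y = msgpack.unpackb(data)
            return Point(x, y)


    # Copy and unfreeze the default registry, then add the custom handler.
    registry = msgpackutils.default_registry.copy(unfreeze=True)
    registry.register(PointHandler())

    blob = msgpackutils.dumps(Point(1, 2), registry=registry)
    point = msgpackutils.loads(blob, registry=registry)
    assert (point.x, point.y) == (1, 2)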
@@ -1,58 +0,0 @@
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Unified and simplified API for oslo.serialization's serializers.
"""


import abc
import six


@six.add_metaclass(abc.ABCMeta)
class BaseSerializer(object):
    """Generic (de-)serialization definition abstract base class."""

    @abc.abstractmethod
    def dump(self, obj, fp):
        """Serialize ``obj`` as a stream to ``fp``.

        :param obj: python object to be serialized
        :param fp: ``.write()``-supporting file-like object
        """

    @abc.abstractmethod
    def dump_as_bytes(self, obj):
        """Serialize ``obj`` to a byte string.

        :param obj: python object to be serialized
        :returns: byte string
        """

    @abc.abstractmethod
    def load(self, fp):
        """Deserialize ``fp`` to a python object.

        :param fp: ``.read()``-supporting file-like object
        :returns: python object
        """

    @abc.abstractmethod
    def load_from_bytes(self, s):
        """Deserialize ``s`` to a python object.

        :param s: byte string to be deserialized
        :returns: python object
        """
@@ -1,38 +0,0 @@
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


from oslo_serialization import jsonutils
from oslo_serialization.serializer.base_serializer import BaseSerializer


class JSONSerializer(BaseSerializer):
    """JSON serializer based on the jsonutils module."""

    def __init__(self, default=jsonutils.to_primitive, encoding='utf-8'):
        self._default = default
        self._encoding = encoding

    def dump(self, obj, fp):
        return jsonutils.dump(obj, fp)

    def dump_as_bytes(self, obj):
        return jsonutils.dump_as_bytes(obj, default=self._default,
                                       encoding=self._encoding)

    def load(self, fp):
        return jsonutils.load(fp, encoding=self._encoding)

    def load_from_bytes(self, s):
        return jsonutils.loads(s, encoding=self._encoding)
@@ -1,36 +0,0 @@
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


from oslo_serialization import msgpackutils
from oslo_serialization.serializer.base_serializer import BaseSerializer


class MessagePackSerializer(BaseSerializer):
    """MessagePack serializer based on the msgpackutils module."""

    def __init__(self, registry=None):
        self._registry = registry

    def dump(self, obj, fp):
        return msgpackutils.dump(obj, fp, registry=self._registry)

    def dump_as_bytes(self, obj):
        return msgpackutils.dumps(obj, registry=self._registry)

    def load(self, fp):
        return msgpackutils.load(fp, registry=self._registry)

    def load_from_bytes(self, s):
        return msgpackutils.loads(s, registry=self._registry)
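A short sketch of the unified serializer API defined in the two files above; the module paths oslo_serialization.serializer.json_serializer and oslo_serialization.serializer.msgpack_serializer are assumed from the package imports shown in these files:

    from oslo_serialization.serializer.json_serializer import JSONSerializer
    from oslo_serialization.serializer.msgpack_serializer import MessagePackSerializer

    data = {'a': 1, 'b': [1, 2, 3]}

    # Both classes implement the same BaseSerializer interface, so callers
    # can switch formats without changing the surrounding code.
    for serializer in (JSONSerializer(), MessagePackSerializer()):
        blob = serializer.dump_as_bytes(data)
        assert serializer.load_from_bytes(blob) == data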
@@ -1,56 +0,0 @@
# Copyright 2015 Red Hat
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_serialization import base64
from oslotest import base as test_base


class Base64Tests(test_base.BaseTestCase):

    def test_encode_as_bytes(self):
        self.assertEqual(b'dGV4dA==',
                         base64.encode_as_bytes(b'text'))
        self.assertEqual(b'dGV4dA==',
                         base64.encode_as_bytes(u'text'))
        self.assertEqual(b'ZTrDqQ==',
                         base64.encode_as_bytes(u'e:\xe9'))
        self.assertEqual(b'ZTrp',
                         base64.encode_as_bytes(u'e:\xe9', encoding='latin1'))

    def test_encode_as_text(self):
        self.assertEqual(u'dGV4dA==',
                         base64.encode_as_text(b'text'))
        self.assertEqual(u'dGV4dA==',
                         base64.encode_as_text(u'text'))
        self.assertEqual(u'ZTrDqQ==',
                         base64.encode_as_text(u'e:\xe9'))
        self.assertEqual(u'ZTrp',
                         base64.encode_as_text(u'e:\xe9', encoding='latin1'))

    def test_decode_as_bytes(self):
        self.assertEqual(b'text',
                         base64.decode_as_bytes(b'dGV4dA=='))
        self.assertEqual(b'text',
                         base64.decode_as_bytes(u'dGV4dA=='))

    def test_decode_as_text(self):
        self.assertEqual(u'text',
                         base64.decode_as_text(b'dGV4dA=='))
        self.assertEqual(u'text',
                         base64.decode_as_text(u'dGV4dA=='))
        self.assertEqual(u'e:\xe9',
                         base64.decode_as_text(u'ZTrDqQ=='))
        self.assertEqual(u'e:\xe9',
                         base64.decode_as_text(u'ZTrp', encoding='latin1'))
@ -1,333 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import collections
import datetime
import ipaddress
import json

import mock
import netaddr
from oslo_i18n import fixture
from oslotest import base as test_base
import six
import six.moves.xmlrpc_client as xmlrpclib

from oslo_serialization import jsonutils


class JSONUtilsTestMixin(object):

    json_impl = None

    def setUp(self):
        super(JSONUtilsTestMixin, self).setUp()
        self.json_patcher = mock.patch.multiple(
            jsonutils, json=self.json_impl,
        )
        self.json_impl_mock = self.json_patcher.start()

    def tearDown(self):
        self.json_patcher.stop()
        super(JSONUtilsTestMixin, self).tearDown()

    def test_dumps(self):
        self.assertEqual('{"a": "b"}', jsonutils.dumps({'a': 'b'}))

    def test_dump_as_bytes(self):
        self.assertEqual(b'{"a": "b"}', jsonutils.dump_as_bytes({'a': 'b'}))

    def test_dumps_namedtuple(self):
        n = collections.namedtuple("foo", "bar baz")(1, 2)
        self.assertEqual('[1, 2]', jsonutils.dumps(n))

    def test_dump(self):
        expected = '{"a": "b"}'
        json_dict = {'a': 'b'}

        fp = six.StringIO()
        jsonutils.dump(json_dict, fp)

        self.assertEqual(expected, fp.getvalue())

    def test_dump_namedtuple(self):
        expected = '[1, 2]'
        json_dict = collections.namedtuple("foo", "bar baz")(1, 2)

        fp = six.StringIO()
        jsonutils.dump(json_dict, fp)

        self.assertEqual(expected, fp.getvalue())

    def test_loads(self):
        self.assertEqual({'a': 'b'}, jsonutils.loads('{"a": "b"}'))

    def test_loads_unicode(self):
        self.assertIsInstance(jsonutils.loads(b'"foo"'), six.text_type)
        self.assertIsInstance(jsonutils.loads(u'"foo"'), six.text_type)

        # 'test' in Ukrainian
        i18n_str_unicode = u'"\u0442\u0435\u0441\u0442"'
        self.assertIsInstance(jsonutils.loads(i18n_str_unicode), six.text_type)

        i18n_str = i18n_str_unicode.encode('utf-8')
        self.assertIsInstance(jsonutils.loads(i18n_str), six.text_type)

    def test_loads_with_kwargs(self):
        jsontext = u'{"foo": 3}'
        result = jsonutils.loads(jsontext, parse_int=lambda x: 5)
        self.assertEqual(5, result['foo'])

    def test_load(self):
        jsontext = u'{"a": "\u0442\u044d\u0441\u0442"}'
        expected = {u'a': u'\u0442\u044d\u0441\u0442'}

        for encoding in ('utf-8', 'cp1251'):
            fp = six.BytesIO(jsontext.encode(encoding))
            result = jsonutils.load(fp, encoding=encoding)
            self.assertEqual(expected, result)
            for key, val in result.items():
                self.assertIsInstance(key, six.text_type)
                self.assertIsInstance(val, six.text_type)


class JSONUtilsTestJson(JSONUtilsTestMixin, test_base.BaseTestCase):
    json_impl = json


class ToPrimitiveTestCase(test_base.BaseTestCase):
    def setUp(self):
        super(ToPrimitiveTestCase, self).setUp()
        self.trans_fixture = self.useFixture(fixture.Translation())

    def test_bytes(self):
        self.assertEqual(jsonutils.to_primitive(b'abc'), 'abc')

    def test_list(self):
        self.assertEqual([1, 2, 3], jsonutils.to_primitive([1, 2, 3]))

    def test_empty_list(self):
        self.assertEqual([], jsonutils.to_primitive([]))

    def test_tuple(self):
        self.assertEqual([1, 2, 3], jsonutils.to_primitive((1, 2, 3)))

    def test_dict(self):
        self.assertEqual(dict(a=1, b=2, c=3),
                         jsonutils.to_primitive(dict(a=1, b=2, c=3)))

    def test_empty_dict(self):
        self.assertEqual({}, jsonutils.to_primitive({}))

    def test_datetime(self):
        x = datetime.datetime(1920, 2, 3, 4, 5, 6, 7)
        self.assertEqual('1920-02-03T04:05:06.000007',
                         jsonutils.to_primitive(x))

    def test_datetime_preserve(self):
        x = datetime.datetime(1920, 2, 3, 4, 5, 6, 7)
        self.assertEqual(x, jsonutils.to_primitive(x, convert_datetime=False))

    def test_DateTime(self):
        x = xmlrpclib.DateTime()
        x.decode("19710203T04:05:06")
        self.assertEqual('1971-02-03T04:05:06.000000',
                         jsonutils.to_primitive(x))

    def test_iter(self):
        class IterClass(object):
            def __init__(self):
                self.data = [1, 2, 3, 4, 5]
                self.index = 0

            def __iter__(self):
                return self

            def next(self):
                if self.index == len(self.data):
                    raise StopIteration
                self.index = self.index + 1
                return self.data[self.index - 1]
            __next__ = next

        x = IterClass()
        self.assertEqual([1, 2, 3, 4, 5], jsonutils.to_primitive(x))

    def test_iteritems(self):
        class IterItemsClass(object):
            def __init__(self):
                self.data = dict(a=1, b=2, c=3).items()
                self.index = 0

            def iteritems(self):
                return self.data

        x = IterItemsClass()
        p = jsonutils.to_primitive(x)
        self.assertEqual({'a': 1, 'b': 2, 'c': 3}, p)

    def test_iteritems_with_cycle(self):
        class IterItemsClass(object):
            def __init__(self):
                self.data = dict(a=1, b=2, c=3)
                self.index = 0

            def iteritems(self):
                return self.data.items()

        x = IterItemsClass()
        x2 = IterItemsClass()
        x.data['other'] = x2
        x2.data['other'] = x

        # If the cycle isn't caught, to_primitive() will eventually result in
        # an exception due to excessive recursion depth.
        jsonutils.to_primitive(x)

    def test_items(self):
        # Use items() when iteritems() is not available.
        class ItemsClass(object):
            def __init__(self):
                self.data = dict(a=1, b=2, c=3)

            def items(self):
                return self.data.items()

        x = ItemsClass()
        p = jsonutils.to_primitive(x)
        self.assertEqual({'a': 1, 'b': 2, 'c': 3}, p)

    def test_precedence_items_iteritems(self):
        class ItemsIterItemsClass(object):
            def items(self):
                return {'items': 'items'}

            def iteritems(self):
                return {'iteritems': 'iteritems'}

        x = ItemsIterItemsClass()
        p = jsonutils.to_primitive(x)
        # Prefer iteritems over items
        self.assertEqual({'iteritems': 'iteritems'}, p)

    def test_mapping(self):
        # Make sure collections.Mapping is converted to a dict
        # and not a list.
        class MappingClass(collections.Mapping):
            def __init__(self):
                self.data = dict(a=1, b=2, c=3)

            def __getitem__(self, val):
                return self.data[val]

            def __iter__(self):
                return iter(self.data)

            def __len__(self):
                return len(self.data)

        x = MappingClass()
        p = jsonutils.to_primitive(x)
        self.assertEqual({'a': 1, 'b': 2, 'c': 3}, p)

    def test_instance(self):
        class MysteryClass(object):
            a = 10

            def __init__(self):
                self.b = 1

        x = MysteryClass()
        self.assertEqual(dict(b=1),
                         jsonutils.to_primitive(x, convert_instances=True))

        self.assertEqual(x, jsonutils.to_primitive(x))

    def test_typeerror(self):
        x = bytearray  # Class, not instance
        if six.PY3:
            self.assertEqual(u"<class 'bytearray'>", jsonutils.to_primitive(x))
        else:
            self.assertEqual(u"<type 'bytearray'>", jsonutils.to_primitive(x))

    def test_nasties(self):
        def foo():
            pass
        x = [datetime, foo, dir]
        ret = jsonutils.to_primitive(x)
        self.assertEqual(3, len(ret))
        self.assertTrue(ret[0].startswith(u"<module 'datetime' from ") or
                        ret[0].startswith(u"<module 'datetime' (built-in)"))
        if six.PY3:
            self.assertTrue(ret[1].startswith(
                '<function ToPrimitiveTestCase.test_nasties.<locals>.foo at 0x'
            ))
        else:
            self.assertTrue(ret[1].startswith('<function foo at 0x'))
        self.assertEqual('<built-in function dir>', ret[2])

    def test_depth(self):
        class LevelsGenerator(object):
            def __init__(self, levels):
                self._levels = levels

            def iteritems(self):
                if self._levels == 0:
                    return iter([])
                else:
                    return iter([(0, LevelsGenerator(self._levels - 1))])

        l4_obj = LevelsGenerator(4)

        json_l2 = {0: {0: None}}
        json_l3 = {0: {0: {0: None}}}
        json_l4 = {0: {0: {0: {0: None}}}}

        ret = jsonutils.to_primitive(l4_obj, max_depth=2)
        self.assertEqual(json_l2, ret)

        ret = jsonutils.to_primitive(l4_obj, max_depth=3)
        self.assertEqual(json_l3, ret)

        ret = jsonutils.to_primitive(l4_obj, max_depth=4)
        self.assertEqual(json_l4, ret)

    def test_ipaddr_using_netaddr(self):
        thing = {'ip_addr': netaddr.IPAddress('1.2.3.4')}
        ret = jsonutils.to_primitive(thing)
        self.assertEqual({'ip_addr': '1.2.3.4'}, ret)

    def test_ipaddr_using_ipaddress_v4(self):
        thing = {'ip_addr': ipaddress.ip_address(u'192.168.0.1')}
        ret = jsonutils.to_primitive(thing)
        self.assertEqual({'ip_addr': '192.168.0.1'}, ret)

    def test_ipaddr_using_ipaddress_v6(self):
        thing = {'ip_addr': ipaddress.ip_address(u'2001:db8::')}
        ret = jsonutils.to_primitive(thing)
        self.assertEqual({'ip_addr': '2001:db8::'}, ret)

    def test_message_with_param(self):
        msg = self.trans_fixture.lazy('A message with param: %s')
        msg = msg % 'test_domain'
        ret = jsonutils.to_primitive(msg)
        self.assertEqual(msg, ret)

    def test_message_with_named_param(self):
        msg = self.trans_fixture.lazy('A message with params: %(param)s')
        msg = msg % {'param': 'hello'}
        ret = jsonutils.to_primitive(msg)
        self.assertEqual(msg, ret)
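As a companion to the tests above, here is a minimal sketch of the jsonutils calls they cover; the sample values are illustrative, but every call and keyword shown (dumps, dump_as_bytes, loads, to_primitive, convert_datetime) appears in the removed tests.

import datetime

import netaddr

from oslo_serialization import jsonutils

# dumps()/dump_as_bytes() produce text and bytes respectively;
# loads() accepts either text or UTF-8 encoded bytes.
assert jsonutils.dumps({'a': 'b'}) == '{"a": "b"}'
assert jsonutils.dump_as_bytes({'a': 'b'}) == b'{"a": "b"}'
assert jsonutils.loads(b'{"a": "b"}') == {'a': 'b'}

# to_primitive() converts otherwise non-JSON-serializable objects such as
# datetimes and netaddr addresses into plain JSON-friendly values.
when = datetime.datetime(1920, 2, 3, 4, 5, 6, 7)
assert jsonutils.to_primitive(when) == '1920-02-03T04:05:06.000007'
assert jsonutils.to_primitive(when, convert_datetime=False) == when
assert jsonutils.to_primitive({'ip': netaddr.IPAddress('1.2.3.4')}) == {'ip': '1.2.3.4'}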
@ -1,214 +0,0 @@
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import datetime
import itertools
import uuid

import netaddr
from oslotest import base as test_base
from pytz import timezone
import six
import six.moves.xmlrpc_client as xmlrpclib

from oslo_serialization import msgpackutils


_TZ_FMT = '%Y-%m-%d %H:%M:%S %Z%z'


class Color(object):
    def __init__(self, r, g, b):
        self.r = r
        self.g = g
        self.b = b


class ColorHandler(object):
    handles = (Color,)
    identity = (
        msgpackutils.HandlerRegistry.non_reserved_extension_range.min_value + 1
    )

    @staticmethod
    def serialize(obj):
        blob = '%s, %s, %s' % (obj.r, obj.g, obj.b)
        if six.PY3:
            blob = blob.encode("ascii")
        return blob

    @staticmethod
    def deserialize(data):
        chunks = [int(c.strip()) for c in data.split(b",")]
        return Color(chunks[0], chunks[1], chunks[2])


class MySpecialSetHandler(object):
    handles = (set,)
    identity = msgpackutils.SetHandler.identity


def _dumps_loads(obj):
    obj = msgpackutils.dumps(obj)
    return msgpackutils.loads(obj)


class MsgPackUtilsTest(test_base.BaseTestCase):
    def test_list(self):
        self.assertEqual([1, 2, 3], _dumps_loads([1, 2, 3]))

    def test_empty_list(self):
        self.assertEqual([], _dumps_loads([]))

    def test_tuple(self):
        # Seems like we do lose whether it was a tuple or not...
        #
        # Maybe fixed someday:
        #
        # https://github.com/msgpack/msgpack-python/issues/98
        self.assertEqual([1, 2, 3], _dumps_loads((1, 2, 3)))

    def test_dict(self):
        self.assertEqual(dict(a=1, b=2, c=3),
                         _dumps_loads(dict(a=1, b=2, c=3)))

    def test_empty_dict(self):
        self.assertEqual({}, _dumps_loads({}))

    def test_complex_dict(self):
        src = {
            'now': datetime.datetime(1920, 2, 3, 4, 5, 6, 7),
            'later': datetime.datetime(1921, 2, 3, 4, 5, 6, 9),
            'a': 1,
            'b': 2.0,
            'c': [],
            'd': set([1, 2, 3]),
            'zzz': uuid.uuid4(),
            'yyy': 'yyy',
            'ddd': b'bbb',
            'today': datetime.date.today(),
        }
        self.assertEqual(src, _dumps_loads(src))

    def test_itercount(self):
        it = itertools.count(1)
        six.next(it)
        six.next(it)
        it2 = _dumps_loads(it)
        self.assertEqual(six.next(it), six.next(it2))

        it = itertools.count(0)
        it2 = _dumps_loads(it)
        self.assertEqual(six.next(it), six.next(it2))

    def test_itercount_step(self):
        it = itertools.count(1, 3)
        it2 = _dumps_loads(it)
        self.assertEqual(six.next(it), six.next(it2))

    def test_set(self):
        self.assertEqual(set([1, 2]), _dumps_loads(set([1, 2])))

    def test_empty_set(self):
        self.assertEqual(set([]), _dumps_loads(set([])))

    def test_frozenset(self):
        self.assertEqual(frozenset([1, 2]), _dumps_loads(frozenset([1, 2])))

    def test_empty_frozenset(self):
        self.assertEqual(frozenset([]), _dumps_loads(frozenset([])))

    def test_datetime_preserve(self):
        x = datetime.datetime(1920, 2, 3, 4, 5, 6, 7)
        self.assertEqual(x, _dumps_loads(x))

    def test_datetime(self):
        x = xmlrpclib.DateTime()
        x.decode("19710203T04:05:06")
        self.assertEqual(x, _dumps_loads(x))

    def test_ipaddr(self):
        thing = {'ip_addr': netaddr.IPAddress('1.2.3.4')}
        self.assertEqual(thing, _dumps_loads(thing))

    def test_today(self):
        today = datetime.date.today()
        self.assertEqual(today, _dumps_loads(today))

    def test_datetime_tz_clone(self):
        eastern = timezone('US/Eastern')
        now = datetime.datetime.now()
        e_dt = eastern.localize(now)
        e_dt2 = _dumps_loads(e_dt)
        self.assertEqual(e_dt, e_dt2)
        self.assertEqual(e_dt.strftime(_TZ_FMT), e_dt2.strftime(_TZ_FMT))

    def test_datetime_tz_different(self):
        eastern = timezone('US/Eastern')
        pacific = timezone('US/Pacific')
        now = datetime.datetime.now()

        e_dt = eastern.localize(now)
        p_dt = pacific.localize(now)

        self.assertNotEqual(e_dt, p_dt)
        self.assertNotEqual(e_dt.strftime(_TZ_FMT), p_dt.strftime(_TZ_FMT))

        e_dt2 = _dumps_loads(e_dt)
        p_dt2 = _dumps_loads(p_dt)

        self.assertNotEqual(e_dt2, p_dt2)
        self.assertNotEqual(e_dt2.strftime(_TZ_FMT), p_dt2.strftime(_TZ_FMT))

        self.assertEqual(e_dt, e_dt2)
        self.assertEqual(p_dt, p_dt2)

    def test_copy_then_register(self):
        registry = msgpackutils.default_registry
        self.assertRaises(ValueError,
                          registry.register, MySpecialSetHandler(),
                          reserved=True, override=True)
        registry = registry.copy(unfreeze=True)
        registry.register(MySpecialSetHandler(),
                          reserved=True, override=True)
        h = registry.match(set())
        self.assertIsInstance(h, MySpecialSetHandler)

    def test_bad_register(self):
        registry = msgpackutils.default_registry
        self.assertRaises(ValueError,
                          registry.register, MySpecialSetHandler(),
                          reserved=True, override=True)
        self.assertRaises(ValueError,
                          registry.register, MySpecialSetHandler())
        registry = registry.copy(unfreeze=True)
        registry.register(ColorHandler())

        self.assertRaises(ValueError,
                          registry.register, ColorHandler())

    def test_custom_register(self):
        registry = msgpackutils.default_registry.copy(unfreeze=True)
        registry.register(ColorHandler())

        c = Color(255, 254, 253)
        c_b = msgpackutils.dumps(c, registry=registry)
        c = msgpackutils.loads(c_b, registry=registry)

        self.assertEqual(255, c.r)
        self.assertEqual(254, c.g)
        self.assertEqual(253, c.b)

    def test_object(self):
        self.assertRaises(ValueError, msgpackutils.dumps, object())
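To summarize what test_custom_register above demonstrates, registering a handler for an application type follows the pattern sketched below. The Point class and its handler are illustrative stand-ins mirroring Color/ColorHandler above, while the registry calls (default_registry.copy(unfreeze=True), register(), and dumps()/loads() with registry=) are taken directly from the removed tests.

from oslo_serialization import msgpackutils


class Point(object):  # illustrative application type, not part of the library
    def __init__(self, x, y):
        self.x = x
        self.y = y


class PointHandler(object):
    handles = (Point,)
    # Pick an identity inside the non-reserved extension range, as
    # ColorHandler does above.
    identity = (
        msgpackutils.HandlerRegistry.non_reserved_extension_range.min_value + 2
    )

    @staticmethod
    def serialize(obj):
        return ('%s,%s' % (obj.x, obj.y)).encode('ascii')

    @staticmethod
    def deserialize(data):
        x, y = (int(c) for c in data.split(b','))
        return Point(x, y)


# The default registry is frozen; copy it unfrozen, register the handler,
# then pass the copy explicitly when dumping and loading.
registry = msgpackutils.default_registry.copy(unfreeze=True)
registry.register(PointHandler())
blob = msgpackutils.dumps(Point(1, 2), registry=registry)
p = msgpackutils.loads(blob, registry=registry)
assert (p.x, p.y) == (1, 2)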
@ -1,14 +0,0 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.

# NOTE(harlowja): Because oslo.serialization is used by the client libraries,
# we do not want to add a lot of dependencies to it. If you find that
# adding a new feature to oslo.serialization means adding a new dependency,
# that is a likely indicator that the feature belongs somewhere else.

pbr>=1.6 # Apache-2.0
six>=1.9.0 # MIT
msgpack-python>=0.4.0 # Apache-2.0
oslo.utils>=3.16.0 # Apache-2.0
pytz>=2013.6 # MIT
52
setup.cfg
@ -1,52 +0,0 @@
[metadata]
name = oslo.serialization
summary = Oslo Serialization library
description-file =
    README.rst
author = OpenStack
author-email = openstack-dev@lists.openstack.org
home-page = http://launchpad.net/oslo
classifier =
    Environment :: OpenStack
    Intended Audience :: Information Technology
    Intended Audience :: System Administrators
    License :: OSI Approved :: Apache Software License
    Operating System :: POSIX :: Linux
    Programming Language :: Python
    Programming Language :: Python :: 2
    Programming Language :: Python :: 2.7
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3.4
    Programming Language :: Python :: 3.5

[files]
packages =
    oslo_serialization

[pbr]
warnerrors = true

[build_sphinx]
source-dir = doc/source
build-dir = doc/build
all_files = 1

[upload_sphinx]
upload-dir = doc/build/html

[compile_catalog]
directory = oslo.serialization/locale
domain = oslo.serialization

[update_catalog]
domain = oslo.serialization
output_dir = oslo.serialization/locale
input_file = oslo.serialization/locale/oslo.serialization.pot

[extract_messages]
keywords = _ gettext ngettext l_ lazy_gettext
mapping_file = babel.cfg
output_file = oslo.serialization/locale/oslo.serialization.pot

[wheel]
universal = 1
29
setup.py
@ -1,29 +0,0 @@
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools

# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
    import multiprocessing  # noqa
except ImportError:
    pass

setuptools.setup(
    setup_requires=['pbr>=1.8'],
    pbr=True)
@ -1,15 +0,0 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
hacking<0.11,>=0.10.0
ipaddress>=1.0.7;python_version<'3.3' # PSF
mock>=2.0 # BSD
netaddr!=0.7.16,>=0.7.12 # BSD

# this is required for the docs build jobs
sphinx!=1.3b1,<1.3,>=1.2.1 # BSD
oslosphinx!=3.4.0,>=2.5.0 # Apache-2.0

oslotest>=1.10.0 # Apache-2.0
oslo.i18n>=2.1.0 # Apache-2.0
coverage>=3.6 # Apache-2.0
34
tox.ini
@ -1,34 +0,0 @@
[tox]
minversion = 1.6
envlist = py35,py34,py27,pypy,pep8

[testenv]
deps = -r{toxinidir}/test-requirements.txt
commands = python setup.py testr --slowest --testr-args='{posargs}'

[testenv:pep8]
commands = flake8

[testenv:venv]
commands = {posargs}

[testenv:docs]
commands = python setup.py build_sphinx

[testenv:cover]
commands = python setup.py test --coverage --coverage-package-name=oslo_serialization --testr-args='{posargs}'

[flake8]
# E123, E125 skipped as they are invalid PEP-8.

show-source = True
ignore = E123,E125
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build

[testenv:pip-missing-reqs]
# do not install test-requirements as that will pollute the virtualenv for
# determining missing packages
# this also means that pip-missing-reqs must be installed separately, outside
# of the requirements.txt files
deps = pip_missing_reqs
commands = pip-missing-reqs -d --ignore-module=oslo_serialization* --ignore-module=pkg_resources --ignore-file=oslo_serialization/test.py --ignore-file=oslo_serialization/tests/* oslo_serialization