Retire python-karborclient
As announced on the openstack-discuss ML[1], Karbor is retiring this cycle (Wallaby). This commit retires this repository as per the process defined in the project-guide[2]. Thank you to all the contributors of Karbor for your hard work! [1] http://lists.openstack.org/pipermail/openstack-discuss/2020-November/018643.html [2] https://docs.openstack.org/project-team-guide/repository.html#retiring-a-repository Depends-On: https://review.opendev.org/c/openstack/project-config/+/767030 Change-Id: Ic1b039239b8141097873b2f90c448d613c9c11df
This commit is contained in:
parent
c4b0bd2cdb
commit
e1087267bb
@ -1,7 +0,0 @@
|
||||
[run]
|
||||
branch = True
|
||||
source = karborclient
|
||||
omit = karborclient/tests/*,karborclient/openstack/*
|
||||
|
||||
[report]
|
||||
ignore_errors = True
|
56
.gitignore
vendored
56
.gitignore
vendored
@ -1,56 +0,0 @@
|
||||
*.py[cod]
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Packages
|
||||
*.egg
|
||||
*.egg-info
|
||||
dist
|
||||
build
|
||||
.eggs
|
||||
eggs
|
||||
parts
|
||||
bin
|
||||
var
|
||||
sdist
|
||||
develop-eggs
|
||||
.installed.cfg
|
||||
lib
|
||||
lib64
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
.coverage
|
||||
cover
|
||||
.tox
|
||||
nosetests.xml
|
||||
.testrepository
|
||||
.venv
|
||||
.log
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
|
||||
# Mr Developer
|
||||
.mr.developer.cfg
|
||||
.project
|
||||
.pydevproject
|
||||
|
||||
# Complexity
|
||||
output/*.html
|
||||
output/*/index.html
|
||||
|
||||
# Sphinx
|
||||
doc/build
|
||||
|
||||
# pbr generates these
|
||||
AUTHORS
|
||||
ChangeLog
|
||||
|
||||
# Editors
|
||||
*~
|
||||
.*.swp
|
||||
.*sw?
|
3
.mailmap
3
.mailmap
@ -1,3 +0,0 @@
|
||||
# Format is:
|
||||
# <preferred e-mail> <other e-mail 1>
|
||||
# <preferred e-mail> <other e-mail 2>
|
@ -1,7 +0,0 @@
|
||||
[DEFAULT]
|
||||
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
|
||||
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
|
||||
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
|
||||
${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION
|
||||
test_id_option=--load-list $IDFILE
|
||||
test_list_option=--list
|
@ -1,8 +0,0 @@
|
||||
- project:
|
||||
templates:
|
||||
- check-requirements
|
||||
- openstack-cover-jobs
|
||||
- openstack-lower-constraints-jobs
|
||||
- openstack-python3-victoria-jobs
|
||||
- openstackclient-plugin-jobs
|
||||
- publish-openstack-docs-pti
|
@ -1,17 +0,0 @@
|
||||
If you would like to contribute to the development of OpenStack, you must
|
||||
follow the steps in this page:
|
||||
|
||||
https://docs.openstack.org/infra/manual/developers.html
|
||||
|
||||
If you already have a good understanding of how the system works and your
|
||||
OpenStack accounts are set up, you can skip to the development workflow
|
||||
section of this documentation to learn how changes to OpenStack should be
|
||||
submitted for review via the Gerrit tool:
|
||||
|
||||
https://docs.openstack.org/infra/manual/developers.html#development-workflow
|
||||
|
||||
Pull requests submitted through GitHub will be ignored.
|
||||
|
||||
Bugs should be filed on Launchpad, not GitHub:
|
||||
|
||||
https://launchpad.net/python-karborclient
|
@ -1,4 +0,0 @@
|
||||
Style Commandments
|
||||
===============================================
|
||||
|
||||
Read the OpenStack Style Commandments https://docs.openstack.org/hacking/latest/
|
176
LICENSE
176
LICENSE
@ -1,176 +0,0 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
78
README.rst
78
README.rst
@ -1,72 +1,10 @@
|
||||
========================
|
||||
Team and repository tags
|
||||
========================
|
||||
This project is no longer maintained.
|
||||
|
||||
.. image:: https://governance.openstack.org/tc/badges/python-karborclient.svg
|
||||
:target: https://governance.openstack.org/tc/reference/tags/index.html
|
||||
The contents of this repository are still available in the Git
|
||||
source code management system. To see the contents of this
|
||||
repository before it reached its end of life, please check out the
|
||||
previous commit with "git checkout HEAD^1".
|
||||
|
||||
.. Change things from this point on
|
||||
|
||||
======
|
||||
Karbor
|
||||
======
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/python-karborclient.svg
|
||||
:target: https://pypi.org/project/python-karborclient/
|
||||
:alt: Latest Version
|
||||
|
||||
|
||||
Karbor Mission Statement
|
||||
|
||||
* Formalize Application Data Protection in OpenStack (APIs, Services, Plugins, ...)
|
||||
* Be able to protect Any Resource in OpenStack(as well as their dependencies)
|
||||
* Allow Diversity of vendor solutions, capabilities and implementations
|
||||
without compromising usability
|
||||
|
||||
* `PyPi`_ - package installation
|
||||
* `Launchpad project`_ - release management
|
||||
* `Blueprints`_ - feature specifications
|
||||
* `Bugs`_ - issue tracking
|
||||
* `Source`_
|
||||
* `Specs`_
|
||||
* `How to Contribute`_
|
||||
|
||||
.. _PyPi: https://pypi.org/project/python-karborclient
|
||||
|
||||
.. _Launchpad project: https://launchpad.net/python-karborclient
|
||||
.. _Blueprints: https://blueprints.launchpad.net/python-karborclient
|
||||
.. _Bugs: https://bugs.launchpad.net/python-karborclient
|
||||
.. _Source: https://opendev.org/openstack/python-karborclient
|
||||
.. _Specs: https://docs.openstack.org/karbor/latest/specs/index.html
|
||||
.. _How to Contribute: https://docs.openstack.org/infra/manual/developers.html
|
||||
|
||||
|
||||
Python Karborclient
|
||||
-------------------
|
||||
python-karborclient is a client library for karbor built on the karbor API.
|
||||
It provides a Python API (the ``karborclient`` module) and a command-line tool
|
||||
(``karbor``).
|
||||
|
||||
Project Resources
|
||||
-----------------
|
||||
|
||||
Project status, bugs, and blueprints are tracked on Launchpad:
|
||||
|
||||
* Client bug tracker
|
||||
* https://launchpad.net/python-karborclient
|
||||
|
||||
* Karbor bug tracker
|
||||
* https://launchpad.net/karbor
|
||||
|
||||
Developer documentation can be found here:
|
||||
|
||||
https://docs.openstack.org/karbor/latest/
|
||||
|
||||
Additional resources are linked from the project wiki page:
|
||||
|
||||
https://wiki.openstack.org/wiki/karbor
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0
|
||||
For any further questions, please email
|
||||
openstack-discuss@lists.openstack.org or join #openstack-dev on
|
||||
Freenode.
|
||||
|
@ -1,5 +0,0 @@
|
||||
# The order of packages is significant, because pip processes them in the order
|
||||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
sphinx>=2.0.0,!=2.1.0 # BSD
|
||||
openstackdocstheme>=2.2.1 # Apache-2.0
|
@ -1,80 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.abspath('../..'))
|
||||
# -- General configuration ----------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
#'sphinx.ext.intersphinx',
|
||||
'openstackdocstheme'
|
||||
]
|
||||
|
||||
# autodoc generation is a bit aggressive and a nuisance when doing heavy
|
||||
# text edit cycles.
|
||||
# execute "export SPHINX_DEBUG=1" in your terminal to disable
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'python-karborclient'
|
||||
copyright = u'2013, OpenStack Foundation'
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
add_module_names = True
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'native'
|
||||
|
||||
# -- Options for HTML output --------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
||||
# Sphinx are currently 'default' and 'sphinxdoc'.
|
||||
# html_theme_path = ["."]
|
||||
html_theme = 'openstackdocs'
|
||||
# html_static_path = ['static']
|
||||
|
||||
# openstackdocstheme options
|
||||
openstackdocs_repo_name = 'openstack/python-karborclient'
|
||||
openstackdocs_bug_project = 'python-karborclient'
|
||||
openstackdocs_bug_tag = ''
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = '%sdoc' % project
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass
|
||||
# [howto/manual]).
|
||||
latex_documents = [
|
||||
('index',
|
||||
'%s.tex' % project,
|
||||
u'%s Documentation' % project,
|
||||
u'OpenStack Foundation', 'manual'),
|
||||
]
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
#intersphinx_mapping = {'http://docs.python.org/': None}
|
@ -1,16 +0,0 @@
|
||||
============
|
||||
Contributing
|
||||
============
|
||||
|
||||
General Info
|
||||
------------
|
||||
|
||||
.. include:: ../../../CONTRIBUTING.rst
|
||||
|
||||
Approved Specs
|
||||
--------------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
../specs/index
|
@ -1,20 +0,0 @@
|
||||
Welcome to karborclient's documentation!
|
||||
========================================
|
||||
|
||||
Contents
|
||||
--------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
readme
|
||||
install/index
|
||||
user/index
|
||||
contributor/index
|
||||
|
||||
Indices and tables
|
||||
------------------
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`search`
|
||||
|
@ -1,12 +0,0 @@
|
||||
============
|
||||
Installation
|
||||
============
|
||||
|
||||
At the command line::
|
||||
|
||||
$ pip install python-karborclient
|
||||
|
||||
Or, if you have virtualenvwrapper installed::
|
||||
|
||||
$ mkvirtualenv python-karborclient
|
||||
$ pip install python-karborclient
|
@ -1,5 +0,0 @@
|
||||
============
|
||||
Introduction
|
||||
============
|
||||
|
||||
.. include:: ../../README.rst
|
@ -1,13 +0,0 @@
|
||||
Specs
|
||||
=====
|
||||
|
||||
This section contains detailed specification documents for
|
||||
different features inside Karbor Client.
|
||||
|
||||
Approved Specs
|
||||
--------------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
karbor-support-in-python-openstackclient
|
@ -1,166 +0,0 @@
|
||||
..
|
||||
This work is licensed under a Creative Commons Attribution 3.0 Unported
|
||||
License.
|
||||
|
||||
http://creativecommons.org/licenses/by/3.0/legalcode
|
||||
|
||||
========================================
|
||||
Karbor support in python-openstackclient
|
||||
========================================
|
||||
|
||||
Implement a new set of karbor commands as python-openstackclient plugins.
|
||||
|
||||
Launchpad Blueprint:
|
||||
https://blueprints.launchpad.net/python-karborclient/+spec/karbor-support-python-openstackclient
|
||||
|
||||
|
||||
Problem Description
|
||||
===================
|
||||
|
||||
python-openstackclient is becoming the default command line client for many
|
||||
OpenStack projects. Karbor would benefit from implementing all of its client
|
||||
commands as a single python-openstackclient plugin implemented in the
|
||||
python-karborclient repository.
|
||||
|
||||
Proposed Change
|
||||
===============
|
||||
|
||||
The intent of this spec is to identify the commands to be implemented and
|
||||
establish conventions for command and argument names. This spec is not
|
||||
intended to be a full and correct specification of command and argument names.
|
||||
The details can be left to the code reviews for the commands themselves.
|
||||
|
||||
The following conventions will be adopted for command names:
|
||||
|
||||
* As the ``OpenStackClient`` convention, the command name shall always take
|
||||
the following form:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
openstack [<global-options>] <object-1> <action> [<object-2>] \
|
||||
[command-arguments]
|
||||
|
||||
|
||||
As a example:
|
||||
The following ``karbor`` commands about plan will be implemented for ``openstack``
|
||||
initially suggesting these command names:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
karbor plan-create <name> <provider_id> <resources>
|
||||
openstack data protection plan create <name> <provider_id> <resources>
|
||||
|
||||
karbor plan-delete <plan>
|
||||
openstack data protection plan delete <plan>
|
||||
|
||||
karbor plan-list
|
||||
openstack data protection plan list
|
||||
|
||||
karbor plan-show <plan>
|
||||
openstack data protection plan show <plan>
|
||||
|
||||
karbor plan-update <name> <resources> <status>
|
||||
openstack data protection plan update <name> <resources> <status>
|
||||
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
|
||||
None
|
||||
|
||||
Database
|
||||
--------
|
||||
|
||||
None
|
||||
|
||||
Public API
|
||||
----------
|
||||
|
||||
None
|
||||
|
||||
Public API Security
|
||||
-------------------
|
||||
|
||||
None
|
||||
|
||||
Python API
|
||||
----------
|
||||
|
||||
None
|
||||
|
||||
CLI (python-karborclient)
|
||||
-------------------------
|
||||
|
||||
A new directory named osc will be created under /karborclient/osc
|
||||
for the ``OpenStackClient`` plugin and the commands mentioned above.
|
||||
|
||||
Internal API
|
||||
------------
|
||||
|
||||
None
|
||||
|
||||
Guest Agent
|
||||
-----------
|
||||
|
||||
None
|
||||
|
||||
Alternatives
|
||||
------------
|
||||
|
||||
None
|
||||
|
||||
Dashboard Impact (UX)
|
||||
=====================
|
||||
|
||||
None
|
||||
|
||||
Implementation
|
||||
==============
|
||||
|
||||
Assignee(s)
|
||||
-----------
|
||||
|
||||
Primary assignee:
|
||||
chenying
|
||||
|
||||
|
||||
Milestones
|
||||
----------
|
||||
|
||||
|
||||
Work Items
|
||||
----------
|
||||
|
||||
CLI commands as stated above.
|
||||
Unit tests
|
||||
|
||||
Upgrade Implications
|
||||
====================
|
||||
|
||||
None
|
||||
|
||||
Dependencies
|
||||
============
|
||||
|
||||
python-openstackclient
|
||||
osc-lib
|
||||
|
||||
Testing
|
||||
=======
|
||||
|
||||
Unit tests will be located in: /karborclient/tests/unit/osc/
|
||||
|
||||
Documentation Impact
|
||||
====================
|
||||
|
||||
OpenStack Client adoption list will be updated to include python-karborclient.
|
||||
|
||||
References
|
||||
==========
|
||||
|
||||
https://docs.openstack.org/python-openstackclient/latest/
|
||||
|
||||
Appendix
|
||||
========
|
||||
|
||||
None
|
@ -1,7 +0,0 @@
|
||||
=====
|
||||
Usage
|
||||
=====
|
||||
|
||||
To use karborclient in a project::
|
||||
|
||||
import karborclient
|
@ -1,19 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import pbr.version
|
||||
|
||||
|
||||
__version__ = pbr.version.VersionInfo(
|
||||
'python-karborclient').version_string()
|
@ -1,20 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from oslo_utils import importutils
|
||||
|
||||
|
||||
def Client(version, *args, **kwargs):
|
||||
module = importutils.import_versioned_module(
|
||||
'karborclient', version, 'client'
|
||||
)
|
||||
client_class = getattr(module, 'Client')
|
||||
return client_class(*args, **kwargs)
|
@ -1,218 +0,0 @@
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# Copyright 2013 Spanish National Research Council.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# E0202: An attribute inherited from %s hide this method
|
||||
# pylint: disable=E0202
|
||||
|
||||
import abc
|
||||
import argparse
|
||||
import os
|
||||
|
||||
import six
|
||||
from stevedore import extension
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
|
||||
|
||||
_discovered_plugins = {}
|
||||
|
||||
|
||||
def discover_auth_systems():
|
||||
"""Discover the available auth-systems.
|
||||
|
||||
This won't take into account the old style auth-systems.
|
||||
"""
|
||||
global _discovered_plugins
|
||||
_discovered_plugins = {}
|
||||
|
||||
def add_plugin(ext):
|
||||
_discovered_plugins[ext.name] = ext.plugin
|
||||
|
||||
ep_namespace = "karborclient.common.apiclient.auth"
|
||||
mgr = extension.ExtensionManager(ep_namespace)
|
||||
mgr.map(add_plugin)
|
||||
|
||||
|
||||
def load_auth_system_opts(parser):
|
||||
"""Load options needed by the available auth-systems into a parser.
|
||||
|
||||
This function will try to populate the parser with options from the
|
||||
available plugins.
|
||||
"""
|
||||
group = parser.add_argument_group("Common auth options")
|
||||
BaseAuthPlugin.add_common_opts(group)
|
||||
for name, auth_plugin in _discovered_plugins.items():
|
||||
group = parser.add_argument_group(
|
||||
"Auth-system '%s' options" % name,
|
||||
conflict_handler="resolve")
|
||||
auth_plugin.add_opts(group)
|
||||
|
||||
|
||||
def load_plugin(auth_system):
|
||||
try:
|
||||
plugin_class = _discovered_plugins[auth_system]
|
||||
except KeyError:
|
||||
raise exceptions.AuthSystemNotFound(auth_system)
|
||||
return plugin_class(auth_system=auth_system)
|
||||
|
||||
|
||||
def load_plugin_from_args(args):
|
||||
"""Load required plugin and populate it with options.
|
||||
|
||||
Try to guess auth system if it is not specified. Systems are tried in
|
||||
alphabetical order.
|
||||
|
||||
:type args: argparse.Namespace
|
||||
:raises: AuthPluginOptionsMissing
|
||||
"""
|
||||
auth_system = args.os_auth_system
|
||||
if auth_system:
|
||||
plugin = load_plugin(auth_system)
|
||||
plugin.parse_opts(args)
|
||||
plugin.sufficient_options()
|
||||
return plugin
|
||||
|
||||
for plugin_auth_system in sorted(six.iterkeys(_discovered_plugins)):
|
||||
plugin_class = _discovered_plugins[plugin_auth_system]
|
||||
plugin = plugin_class()
|
||||
plugin.parse_opts(args)
|
||||
try:
|
||||
plugin.sufficient_options()
|
||||
except exceptions.AuthPluginOptionsMissing:
|
||||
continue
|
||||
return plugin
|
||||
raise exceptions.AuthPluginOptionsMissing(["auth_system"])
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class BaseAuthPlugin(object):
|
||||
"""Base class for authentication plugins.
|
||||
|
||||
An authentication plugin needs to override at least the authenticate
|
||||
method to be a valid plugin.
|
||||
"""
|
||||
|
||||
auth_system = None
|
||||
opt_names = []
|
||||
common_opt_names = [
|
||||
"auth_system",
|
||||
"username",
|
||||
"password",
|
||||
"tenant_name",
|
||||
"token",
|
||||
"auth_url",
|
||||
]
|
||||
|
||||
def __init__(self, auth_system=None, **kwargs):
|
||||
self.auth_system = auth_system or self.auth_system
|
||||
self.opts = dict((name, kwargs.get(name))
|
||||
for name in self.opt_names)
|
||||
|
||||
@staticmethod
|
||||
def _parser_add_opt(parser, opt):
|
||||
"""Add an option to parser in two variants.
|
||||
|
||||
:param opt: option name (with underscores)
|
||||
"""
|
||||
dashed_opt = opt.replace("_", "-")
|
||||
env_var = "OS_%s" % opt.upper()
|
||||
arg_default = os.environ.get(env_var, "")
|
||||
arg_help = "Defaults to env[%s]." % env_var
|
||||
parser.add_argument(
|
||||
"--os-%s" % dashed_opt,
|
||||
metavar="<%s>" % dashed_opt,
|
||||
default=arg_default,
|
||||
help=arg_help)
|
||||
parser.add_argument(
|
||||
"--os_%s" % opt,
|
||||
metavar="<%s>" % dashed_opt,
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
@classmethod
|
||||
def add_opts(cls, parser):
|
||||
"""Populate the parser with the options for this plugin."""
|
||||
for opt in cls.opt_names:
|
||||
# use `BaseAuthPlugin.common_opt_names` since it is never
|
||||
# changed in child classes
|
||||
if opt not in BaseAuthPlugin.common_opt_names:
|
||||
cls._parser_add_opt(parser, opt)
|
||||
|
||||
@classmethod
|
||||
def add_common_opts(cls, parser):
|
||||
"""Add options that are common for several plugins."""
|
||||
for opt in cls.common_opt_names:
|
||||
cls._parser_add_opt(parser, opt)
|
||||
|
||||
@staticmethod
|
||||
def get_opt(opt_name, args):
|
||||
"""Return option name and value.
|
||||
|
||||
:param opt_name: name of the option, e.g., "username"
|
||||
:param args: parsed arguments
|
||||
"""
|
||||
return (opt_name, getattr(args, "os_%s" % opt_name, None))
|
||||
|
||||
def parse_opts(self, args):
|
||||
"""Parse the actual auth-system options if any.
|
||||
|
||||
This method is expected to populate the attribute `self.opts` with a
|
||||
dict containing the options and values needed to make authentication.
|
||||
"""
|
||||
self.opts.update(dict(self.get_opt(opt_name, args)
|
||||
for opt_name in self.opt_names))
|
||||
|
||||
def authenticate(self, http_client):
|
||||
"""Authenticate using plugin defined method.
|
||||
|
||||
The method usually analyses `self.opts` and performs
|
||||
a request to authentication server.
|
||||
|
||||
:param http_client: client object that needs authentication
|
||||
:type http_client: HTTPClient
|
||||
:raises: AuthorizationFailure
|
||||
"""
|
||||
self.sufficient_options()
|
||||
self._do_authenticate(http_client)
|
||||
|
||||
@abc.abstractmethod
|
||||
def _do_authenticate(self, http_client):
|
||||
"""Protected method for authentication."""
|
||||
|
||||
def sufficient_options(self):
|
||||
"""Check if all required options are present.
|
||||
|
||||
:raises: AuthPluginOptionsMissing
|
||||
"""
|
||||
missing = [opt
|
||||
for opt in self.opt_names
|
||||
if not self.opts.get(opt)]
|
||||
if missing:
|
||||
raise exceptions.AuthPluginOptionsMissing(missing)
|
||||
|
||||
@abc.abstractmethod
|
||||
def token_and_endpoint(self, endpoint_type, service_type):
|
||||
"""Return token and endpoint.
|
||||
|
||||
:param service_type: Service type of the endpoint
|
||||
:type service_type: string
|
||||
:param endpoint_type: Type of endpoint.
|
||||
Possible values: public or publicURL,
|
||||
internal or internalURL,
|
||||
admin or adminURL
|
||||
:type endpoint_type: string
|
||||
:returns: tuple of token and endpoint strings
|
||||
:raises: EndpointException
|
||||
"""
|
@ -1,525 +0,0 @@
|
||||
# Copyright 2010 Jacob Kaplan-Moss
|
||||
# Copyright 2011 OpenStack Foundation
|
||||
# Copyright 2012 Grid Dynamics
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Base utilities to build API operation managers and objects on top of.
|
||||
"""
|
||||
|
||||
# E1102: %s is not callable
|
||||
# pylint: disable=E1102
|
||||
|
||||
import abc
|
||||
import copy
|
||||
|
||||
from oslo_utils import strutils
|
||||
from oslo_utils import uuidutils
|
||||
import six
|
||||
from six.moves.urllib import parse
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
from karborclient.i18n import _
|
||||
|
||||
|
||||
def getid(obj):
|
||||
"""Return id if argument is a Resource.
|
||||
|
||||
Abstracts the common pattern of allowing both an object or an object's ID
|
||||
(UUID) as a parameter when dealing with relationships.
|
||||
"""
|
||||
try:
|
||||
if obj.uuid:
|
||||
return obj.uuid
|
||||
except AttributeError:
|
||||
pass
|
||||
try:
|
||||
return obj.id
|
||||
except AttributeError:
|
||||
return obj
|
||||
|
||||
|
||||
# TODO(aababilov): call run_hooks() in HookableMixin's child classes
|
||||
class HookableMixin(object):
|
||||
"""Mixin so classes can register and run hooks."""
|
||||
_hooks_map = {}
|
||||
|
||||
@classmethod
|
||||
def add_hook(cls, hook_type, hook_func):
|
||||
"""Add a new hook of specified type.
|
||||
|
||||
:param cls: class that registers hooks
|
||||
:param hook_type: hook type, e.g., '__pre_parse_args__'
|
||||
:param hook_func: hook function
|
||||
"""
|
||||
if hook_type not in cls._hooks_map:
|
||||
cls._hooks_map[hook_type] = []
|
||||
|
||||
cls._hooks_map[hook_type].append(hook_func)
|
||||
|
||||
@classmethod
|
||||
def run_hooks(cls, hook_type, *args, **kwargs):
|
||||
"""Run all hooks of specified type.
|
||||
|
||||
:param cls: class that registers hooks
|
||||
:param hook_type: hook type, e.g., '__pre_parse_args__'
|
||||
:param args: args to be passed to every hook function
|
||||
:param kwargs: kwargs to be passed to every hook function
|
||||
"""
|
||||
hook_funcs = cls._hooks_map.get(hook_type) or []
|
||||
for hook_func in hook_funcs:
|
||||
hook_func(*args, **kwargs)
|
||||
|
||||
|
||||
class BaseManager(HookableMixin):
|
||||
"""Basic manager type providing common operations.
|
||||
|
||||
Managers interact with a particular type of API (servers, flavors, images,
|
||||
etc.) and provide CRUD operations for them.
|
||||
"""
|
||||
resource_class = None
|
||||
|
||||
def __init__(self, client):
|
||||
"""Initializes BaseManager with `client`.
|
||||
|
||||
:param client: instance of BaseClient descendant for HTTP requests
|
||||
"""
|
||||
super(BaseManager, self).__init__()
|
||||
self.client = client
|
||||
|
||||
def _list(self, url, response_key, obj_class=None, json=None):
|
||||
"""List the collection.
|
||||
|
||||
:param url: a partial URL, e.g., '/servers'
|
||||
:param response_key: the key to be looked up in response dictionary,
|
||||
e.g., 'servers'
|
||||
:param obj_class: class for constructing the returned objects
|
||||
(self.resource_class will be used by default)
|
||||
:param json: data that will be encoded as JSON and passed in POST
|
||||
request (GET will be sent by default)
|
||||
"""
|
||||
if json:
|
||||
body = self.client.post(url, json=json).json()
|
||||
else:
|
||||
body = self.client.get(url).json()
|
||||
|
||||
if obj_class is None:
|
||||
obj_class = self.resource_class
|
||||
|
||||
data = body[response_key]
|
||||
# NOTE(ja): keystone returns values as list as {'values': [ ... ]}
|
||||
# unlike other services which just return the list...
|
||||
try:
|
||||
data = data['values']
|
||||
except (KeyError, TypeError):
|
||||
pass
|
||||
|
||||
return [obj_class(self, res, loaded=True) for res in data if res]
|
||||
|
||||
def _get(self, url, response_key):
|
||||
"""Get an object from collection.
|
||||
|
||||
:param url: a partial URL, e.g., '/servers'
|
||||
:param response_key: the key to be looked up in response dictionary,
|
||||
e.g., 'server'
|
||||
"""
|
||||
body = self.client.get(url).json()
|
||||
return self.resource_class(self, body[response_key], loaded=True)
|
||||
|
||||
def _head(self, url):
|
||||
"""Retrieve request headers for an object.
|
||||
|
||||
:param url: a partial URL, e.g., '/servers'
|
||||
"""
|
||||
resp = self.client.head(url)
|
||||
return resp.status_code == 204
|
||||
|
||||
def _post(self, url, json, response_key, return_raw=False):
|
||||
"""Create an object.
|
||||
|
||||
:param url: a partial URL, e.g., '/servers'
|
||||
:param json: data that will be encoded as JSON and passed in POST
|
||||
request (GET will be sent by default)
|
||||
:param response_key: the key to be looked up in response dictionary,
|
||||
e.g., 'servers'
|
||||
:param return_raw: flag to force returning raw JSON instead of
|
||||
Python object of self.resource_class
|
||||
"""
|
||||
body = self.client.post(url, json=json).json()
|
||||
if return_raw:
|
||||
return body[response_key]
|
||||
return self.resource_class(self, body[response_key])
|
||||
|
||||
def _put(self, url, json=None, response_key=None):
|
||||
"""Update an object with PUT method.
|
||||
|
||||
:param url: a partial URL, e.g., '/servers'
|
||||
:param json: data that will be encoded as JSON and passed in POST
|
||||
request (GET will be sent by default)
|
||||
:param response_key: the key to be looked up in response dictionary,
|
||||
e.g., 'servers'
|
||||
"""
|
||||
resp = self.client.put(url, json=json)
|
||||
# PUT requests may not return a body
|
||||
if resp.content:
|
||||
body = resp.json()
|
||||
if response_key is not None:
|
||||
return self.resource_class(self, body[response_key])
|
||||
else:
|
||||
return self.resource_class(self, body)
|
||||
|
||||
def _patch(self, url, json=None, response_key=None):
|
||||
"""Update an object with PATCH method.
|
||||
|
||||
:param url: a partial URL, e.g., '/servers'
|
||||
:param json: data that will be encoded as JSON and passed in POST
|
||||
request (GET will be sent by default)
|
||||
:param response_key: the key to be looked up in response dictionary,
|
||||
e.g., 'servers'
|
||||
"""
|
||||
body = self.client.patch(url, json=json).json()
|
||||
if response_key is not None:
|
||||
return self.resource_class(self, body[response_key])
|
||||
else:
|
||||
return self.resource_class(self, body)
|
||||
|
||||
def _delete(self, url):
|
||||
"""Delete an object.
|
||||
|
||||
:param url: a partial URL, e.g., '/servers/my-server'
|
||||
"""
|
||||
return self.client.delete(url)
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class ManagerWithFind(BaseManager):
|
||||
"""Manager with additional `find()`/`findall()` methods."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def list(self):
|
||||
pass
|
||||
|
||||
def find(self, **kwargs):
|
||||
"""Find a single item with attributes matching ``**kwargs``.
|
||||
|
||||
This isn't very efficient: it loads the entire list then filters on
|
||||
the Python side.
|
||||
"""
|
||||
matches = self.findall(**kwargs)
|
||||
num_matches = len(matches)
|
||||
if num_matches == 0:
|
||||
msg = _("No %(name)s matching %(args)s.") % {
|
||||
'name': self.resource_class.__name__,
|
||||
'args': kwargs
|
||||
}
|
||||
raise exceptions.NotFound(msg)
|
||||
elif num_matches > 1:
|
||||
raise exceptions.NoUniqueMatch()
|
||||
else:
|
||||
return matches[0]
|
||||
|
||||
def findall(self, **kwargs):
|
||||
"""Find all items with attributes matching ``**kwargs``.
|
||||
|
||||
This isn't very efficient: it loads the entire list then filters on
|
||||
the Python side.
|
||||
"""
|
||||
found = []
|
||||
searches = kwargs.items()
|
||||
|
||||
for obj in self.list():
|
||||
try:
|
||||
if all(getattr(obj, attr) == value
|
||||
for (attr, value) in searches):
|
||||
found.append(obj)
|
||||
except AttributeError:
|
||||
continue
|
||||
|
||||
return found
|
||||
|
||||
|
||||
class CrudManager(BaseManager):
|
||||
"""Base manager class for manipulating entities.
|
||||
|
||||
Children of this class are expected to define a `collection_key` and `key`.
|
||||
|
||||
- `collection_key`: Usually a plural noun by convention (e.g. `entities`);
|
||||
used to refer collections in both URL's (e.g. `/v3/entities`) and JSON
|
||||
objects containing a list of member resources (e.g. `{'entities': [{},
|
||||
{}, {}]}`).
|
||||
- `key`: Usually a singular noun by convention (e.g. `entity`); used to
|
||||
refer to an individual member of the collection.
|
||||
|
||||
"""
|
||||
collection_key = None
|
||||
key = None
|
||||
|
||||
def build_url(self, base_url=None, **kwargs):
|
||||
"""Builds a resource URL for the given kwargs.
|
||||
|
||||
Given an example collection where `collection_key = 'entities'` and
|
||||
`key = 'entity'`, the following URL's could be generated.
|
||||
|
||||
By default, the URL will represent a collection of entities, e.g.::
|
||||
|
||||
/entities
|
||||
|
||||
If kwargs contains an `entity_id`, then the URL will represent a
|
||||
specific member, e.g.::
|
||||
|
||||
/entities/{entity_id}
|
||||
|
||||
:param base_url: if provided, the generated URL will be appended to it
|
||||
"""
|
||||
url = base_url if base_url is not None else ''
|
||||
|
||||
url += '/%s' % self.collection_key
|
||||
|
||||
# do we have a specific entity?
|
||||
entity_id = kwargs.get('%s_id' % self.key)
|
||||
if entity_id is not None:
|
||||
url += '/%s' % entity_id
|
||||
|
||||
return url
|
||||
|
||||
def _filter_kwargs(self, kwargs):
|
||||
"""Drop null values and handle ids."""
|
||||
for key, ref in kwargs.copy().items():
|
||||
if ref is None:
|
||||
kwargs.pop(key)
|
||||
else:
|
||||
if isinstance(ref, Resource):
|
||||
kwargs.pop(key)
|
||||
kwargs['%s_id' % key] = getid(ref)
|
||||
return kwargs
|
||||
|
||||
def create(self, **kwargs):
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
return self._post(
|
||||
self.build_url(**kwargs),
|
||||
{self.key: kwargs},
|
||||
self.key)
|
||||
|
||||
def get(self, **kwargs):
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
return self._get(
|
||||
self.build_url(**kwargs),
|
||||
self.key)
|
||||
|
||||
def head(self, **kwargs):
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
return self._head(self.build_url(**kwargs))
|
||||
|
||||
def list(self, base_url=None, **kwargs):
|
||||
"""List the collection.
|
||||
|
||||
:param base_url: if provided, the generated URL will be appended to it
|
||||
"""
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
|
||||
return self._list(
|
||||
'%(base_url)s%(query)s' % {
|
||||
'base_url': self.build_url(base_url=base_url, **kwargs),
|
||||
'query': '?%s' % parse.urlencode(kwargs) if kwargs else '',
|
||||
},
|
||||
self.collection_key)
|
||||
|
||||
def put(self, base_url=None, **kwargs):
|
||||
"""Update an element.
|
||||
|
||||
:param base_url: if provided, the generated URL will be appended to it
|
||||
"""
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
|
||||
return self._put(self.build_url(base_url=base_url, **kwargs))
|
||||
|
||||
def update(self, **kwargs):
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
params = kwargs.copy()
|
||||
params.pop('%s_id' % self.key)
|
||||
|
||||
return self._patch(
|
||||
self.build_url(**kwargs),
|
||||
{self.key: params},
|
||||
self.key)
|
||||
|
||||
def delete(self, **kwargs):
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
|
||||
return self._delete(
|
||||
self.build_url(**kwargs))
|
||||
|
||||
def find(self, base_url=None, **kwargs):
|
||||
"""Find a single item with attributes matching ``**kwargs``.
|
||||
|
||||
:param base_url: if provided, the generated URL will be appended to it
|
||||
"""
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
|
||||
rl = self._list(
|
||||
'%(base_url)s%(query)s' % {
|
||||
'base_url': self.build_url(base_url=base_url, **kwargs),
|
||||
'query': '?%s' % parse.urlencode(kwargs) if kwargs else '',
|
||||
},
|
||||
self.collection_key)
|
||||
num = len(rl)
|
||||
|
||||
if num == 0:
|
||||
msg = _("No %(name)s matching %(args)s.") % {
|
||||
'name': self.resource_class.__name__,
|
||||
'args': kwargs
|
||||
}
|
||||
raise exceptions.NotFound(404, msg)
|
||||
elif num > 1:
|
||||
raise exceptions.NoUniqueMatch
|
||||
else:
|
||||
return rl[0]
|
||||
|
||||
|
||||
class Extension(HookableMixin):
|
||||
"""Extension descriptor."""
|
||||
|
||||
SUPPORTED_HOOKS = ('__pre_parse_args__', '__post_parse_args__')
|
||||
manager_class = None
|
||||
|
||||
def __init__(self, name, module):
|
||||
super(Extension, self).__init__()
|
||||
self.name = name
|
||||
self.module = module
|
||||
self._parse_extension_module()
|
||||
|
||||
def _parse_extension_module(self):
|
||||
self.manager_class = None
|
||||
for attr_name, attr_value in self.module.__dict__.items():
|
||||
if attr_name in self.SUPPORTED_HOOKS:
|
||||
self.add_hook(attr_name, attr_value)
|
||||
else:
|
||||
try:
|
||||
if issubclass(attr_value, BaseManager):
|
||||
self.manager_class = attr_value
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
def __repr__(self):
|
||||
return "<Extension '%s'>" % self.name
|
||||
|
||||
|
||||
class Resource(object):
|
||||
"""Base class for OpenStack resources (tenant, user, etc.).
|
||||
|
||||
This is pretty much just a bag for attributes.
|
||||
"""
|
||||
|
||||
HUMAN_ID = False
|
||||
NAME_ATTR = 'name'
|
||||
|
||||
def __init__(self, manager, info, loaded=False):
|
||||
"""Populate and bind to a manager.
|
||||
|
||||
:param manager: BaseManager object
|
||||
:param info: dictionary representing resource attributes
|
||||
:param loaded: prevent lazy-loading if set to True
|
||||
"""
|
||||
self.manager = manager
|
||||
self._info = info
|
||||
self._add_details(info)
|
||||
self._loaded = loaded
|
||||
self._init_completion_cache()
|
||||
|
||||
def _init_completion_cache(self):
|
||||
cache_write = getattr(self.manager, 'write_to_completion_cache', None)
|
||||
if not cache_write:
|
||||
return
|
||||
|
||||
# NOTE(sirp): ensure `id` is already present because if it isn't we'll
|
||||
# enter an infinite loop of __getattr__ -> get -> __init__ ->
|
||||
# __getattr__ -> ...
|
||||
if 'id' in self.__dict__ and uuidutils.is_uuid_like(self.id):
|
||||
cache_write('uuid', self.id)
|
||||
|
||||
if self.human_id:
|
||||
cache_write('human_id', self.human_id)
|
||||
|
||||
def __repr__(self):
|
||||
reprkeys = sorted(k
|
||||
for k in self.__dict__.keys()
|
||||
if k[0] != '_' and k != 'manager')
|
||||
info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys)
|
||||
return "<%s %s>" % (self.__class__.__name__, info)
|
||||
|
||||
@property
|
||||
def human_id(self):
|
||||
"""Human-readable ID which can be used for bash completion."""
|
||||
if self.HUMAN_ID:
|
||||
name = getattr(self, self.NAME_ATTR, None)
|
||||
if name is not None:
|
||||
return strutils.to_slug(name)
|
||||
return None
|
||||
|
||||
def _add_details(self, info):
|
||||
for (k, v) in info.items():
|
||||
try:
|
||||
setattr(self, k, v)
|
||||
self._info[k] = v
|
||||
except AttributeError:
|
||||
# In this case we already defined the attribute on the class
|
||||
pass
|
||||
|
||||
def __getattr__(self, k):
|
||||
if k not in self.__dict__:
|
||||
# NOTE(bcwaldon): disallow lazy-loading if already loaded once
|
||||
if not self.is_loaded():
|
||||
self.get()
|
||||
return self.__getattr__(k)
|
||||
|
||||
raise AttributeError(k)
|
||||
else:
|
||||
return self.__dict__[k]
|
||||
|
||||
def get(self):
|
||||
"""Support for lazy loading details.
|
||||
|
||||
Some clients, such as novaclient have the option to lazy load the
|
||||
details, details which can be loaded with this function.
|
||||
"""
|
||||
# set_loaded() first ... so if we have to bail, we know we tried.
|
||||
self.set_loaded(True)
|
||||
if not hasattr(self.manager, 'get'):
|
||||
return
|
||||
|
||||
new = self.manager.get(self.id)
|
||||
if new:
|
||||
self._add_details(new._info)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, Resource):
|
||||
return NotImplemented
|
||||
# two resources of different types are not equal
|
||||
if not isinstance(other, self.__class__):
|
||||
return False
|
||||
return self._info == other._info
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def is_loaded(self):
|
||||
return self._loaded
|
||||
|
||||
def set_loaded(self, val):
|
||||
self._loaded = val
|
||||
|
||||
def to_dict(self):
|
||||
return copy.deepcopy(self._info)
|
@ -1,359 +0,0 @@
|
||||
# Copyright 2010 Jacob Kaplan-Moss
|
||||
# Copyright 2011 OpenStack Foundation
|
||||
# Copyright 2011 Piston Cloud Computing, Inc.
|
||||
# Copyright 2013 Alessio Ababilov
|
||||
# Copyright 2013 Grid Dynamics
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
OpenStack Client interface. Handles the REST calls and responses.
|
||||
"""
|
||||
|
||||
# E0202: An attribute inherited from %s hide this method
|
||||
# pylint: disable=E0202
|
||||
|
||||
import time
|
||||
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import importutils
|
||||
import requests
|
||||
|
||||
from karborclient.i18n import _
|
||||
from karborclient.openstack.common.apiclient import exceptions
|
||||
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HTTPClient(object):
|
||||
"""This client handles sending HTTP requests to OpenStack servers.
|
||||
|
||||
Features:
|
||||
|
||||
- share authentication information between several clients to different
|
||||
services (e.g., for compute and image clients);
|
||||
- reissue authentication request for expired tokens;
|
||||
- encode/decode JSON bodies;
|
||||
- raise exceptions on HTTP errors;
|
||||
- pluggable authentication;
|
||||
- store authentication information in a keyring;
|
||||
- store time spent for requests;
|
||||
- register clients for particular services, so one can use
|
||||
`http_client.identity` or `http_client.compute`;
|
||||
- log requests and responses in a format that is easy to copy-and-paste
|
||||
into terminal and send the same request with curl.
|
||||
"""
|
||||
|
||||
user_agent = "karborclient.common.apiclient"
|
||||
|
||||
def __init__(self,
|
||||
auth_plugin,
|
||||
region_name=None,
|
||||
endpoint_type="publicURL",
|
||||
original_ip=None,
|
||||
verify=True,
|
||||
cert=None,
|
||||
timeout=None,
|
||||
timings=False,
|
||||
keyring_saver=None,
|
||||
debug=False,
|
||||
user_agent=None,
|
||||
http=None):
|
||||
self.auth_plugin = auth_plugin
|
||||
|
||||
self.endpoint_type = endpoint_type
|
||||
self.region_name = region_name
|
||||
|
||||
self.original_ip = original_ip
|
||||
self.timeout = timeout
|
||||
self.verify = verify
|
||||
self.cert = cert
|
||||
|
||||
self.keyring_saver = keyring_saver
|
||||
self.debug = debug
|
||||
self.user_agent = user_agent or self.user_agent
|
||||
|
||||
self.times = [] # [("item", starttime, endtime), ...]
|
||||
self.timings = timings
|
||||
|
||||
# requests within the same session can reuse TCP connections from pool
|
||||
self.http = http or requests.Session()
|
||||
|
||||
self.cached_token = None
|
||||
|
||||
def _http_log_req(self, method, url, kwargs):
|
||||
if not self.debug:
|
||||
return
|
||||
|
||||
string_parts = [
|
||||
"curl -i",
|
||||
"-X '%s'" % method,
|
||||
"'%s'" % url,
|
||||
]
|
||||
|
||||
for element in kwargs['headers']:
|
||||
header = "-H '%s: %s'" % (element, kwargs['headers'][element])
|
||||
string_parts.append(header)
|
||||
|
||||
_logger.debug("REQ: %s" % " ".join(string_parts))
|
||||
if 'data' in kwargs:
|
||||
_logger.debug("REQ BODY: %s\n" % (kwargs['data']))
|
||||
|
||||
def _http_log_resp(self, resp):
|
||||
if not self.debug:
|
||||
return
|
||||
_logger.debug(
|
||||
"RESP: [%s] %s\n",
|
||||
resp.status_code,
|
||||
resp.headers)
|
||||
if resp._content_consumed:
|
||||
_logger.debug(
|
||||
"RESP BODY: %s\n",
|
||||
resp.text)
|
||||
|
||||
def serialize(self, kwargs):
|
||||
if kwargs.get('json') is not None:
|
||||
kwargs['headers']['Content-Type'] = 'application/json'
|
||||
kwargs['data'] = jsonutils.dumps(kwargs['json'])
|
||||
try:
|
||||
del kwargs['json']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def get_timings(self):
|
||||
return self.times
|
||||
|
||||
def reset_timings(self):
|
||||
self.times = []
|
||||
|
||||
def request(self, method, url, **kwargs):
|
||||
"""Send an http request with the specified characteristics.
|
||||
|
||||
Wrapper around `requests.Session.request` to handle tasks such as
|
||||
setting headers, JSON encoding/decoding, and error handling.
|
||||
|
||||
:param method: method of HTTP request
|
||||
:param url: URL of HTTP request
|
||||
:param kwargs: any other parameter that can be passed to
|
||||
requests.Session.request (such as `headers`) or `json`
|
||||
that will be encoded as JSON and used as `data` argument
|
||||
"""
|
||||
kwargs.setdefault("headers", kwargs.get("headers", {}))
|
||||
kwargs["headers"]["User-Agent"] = self.user_agent
|
||||
if self.original_ip:
|
||||
kwargs["headers"]["Forwarded"] = "for=%s;by=%s" % (
|
||||
self.original_ip, self.user_agent)
|
||||
if self.timeout is not None:
|
||||
kwargs.setdefault("timeout", self.timeout)
|
||||
kwargs.setdefault("verify", self.verify)
|
||||
if self.cert is not None:
|
||||
kwargs.setdefault("cert", self.cert)
|
||||
self.serialize(kwargs)
|
||||
|
||||
self._http_log_req(method, url, kwargs)
|
||||
if self.timings:
|
||||
start_time = time.time()
|
||||
resp = self.http.request(method, url, **kwargs)
|
||||
if self.timings:
|
||||
self.times.append(("%s %s" % (method, url),
|
||||
start_time, time.time()))
|
||||
self._http_log_resp(resp)
|
||||
|
||||
if resp.status_code >= 400:
|
||||
_logger.debug(
|
||||
"Request returned failure status: %s",
|
||||
resp.status_code)
|
||||
raise exceptions.from_response(resp, method, url)
|
||||
|
||||
return resp
|
||||
|
||||
@staticmethod
|
||||
def concat_url(endpoint, url):
|
||||
"""Concatenate endpoint and final URL.
|
||||
|
||||
E.g., "http://keystone/v2.0/" and "/tokens" are concatenated to
|
||||
"http://keystone/v2.0/tokens".
|
||||
|
||||
:param endpoint: the base URL
|
||||
:param url: the final URL
|
||||
"""
|
||||
return "%s/%s" % (endpoint.rstrip("/"), url.strip("/"))
|
||||
|
||||
def client_request(self, client, method, url, **kwargs):
|
||||
"""Send an http request using `client`'s endpoint and specified `url`.
|
||||
|
||||
If request was rejected as unauthorized (possibly because the token is
|
||||
expired), issue one authorization attempt and send the request once
|
||||
again.
|
||||
|
||||
:param client: instance of BaseClient descendant
|
||||
:param method: method of HTTP request
|
||||
:param url: URL of HTTP request
|
||||
:param kwargs: any other parameter that can be passed to
|
||||
`HTTPClient.request`
|
||||
"""
|
||||
|
||||
filter_args = {
|
||||
"endpoint_type": client.endpoint_type or self.endpoint_type,
|
||||
"service_type": client.service_type,
|
||||
}
|
||||
token, endpoint = (self.cached_token, client.cached_endpoint)
|
||||
just_authenticated = False
|
||||
if not (token and endpoint):
|
||||
try:
|
||||
token, endpoint = self.auth_plugin.token_and_endpoint(
|
||||
**filter_args)
|
||||
except exceptions.EndpointException:
|
||||
pass
|
||||
if not (token and endpoint):
|
||||
self.authenticate()
|
||||
just_authenticated = True
|
||||
token, endpoint = self.auth_plugin.token_and_endpoint(
|
||||
**filter_args)
|
||||
if not (token and endpoint):
|
||||
raise exceptions.AuthorizationFailure(
|
||||
_("Cannot find endpoint or token for request"))
|
||||
|
||||
old_token_endpoint = (token, endpoint)
|
||||
kwargs.setdefault("headers", {})["X-Auth-Token"] = token
|
||||
self.cached_token = token
|
||||
client.cached_endpoint = endpoint
|
||||
# Perform the request once. If we get Unauthorized, then it
|
||||
# might be because the auth token expired, so try to
|
||||
# re-authenticate and try again. If it still fails, bail.
|
||||
try:
|
||||
return self.request(
|
||||
method, self.concat_url(endpoint, url), **kwargs)
|
||||
except exceptions.Unauthorized as unauth_ex:
|
||||
if just_authenticated:
|
||||
raise
|
||||
self.cached_token = None
|
||||
client.cached_endpoint = None
|
||||
self.authenticate()
|
||||
try:
|
||||
token, endpoint = self.auth_plugin.token_and_endpoint(
|
||||
**filter_args)
|
||||
except exceptions.EndpointException:
|
||||
raise unauth_ex
|
||||
if (not (token and endpoint) or
|
||||
old_token_endpoint == (token, endpoint)):
|
||||
raise unauth_ex
|
||||
self.cached_token = token
|
||||
client.cached_endpoint = endpoint
|
||||
kwargs["headers"]["X-Auth-Token"] = token
|
||||
return self.request(
|
||||
method, self.concat_url(endpoint, url), **kwargs)
|
||||
|
||||
def add_client(self, base_client_instance):
|
||||
"""Add a new instance of :class:`BaseClient` descendant.
|
||||
|
||||
`self` will store a reference to `base_client_instance`.
|
||||
|
||||
Example:
|
||||
|
||||
>>> def test_clients():
|
||||
... from keystoneclient.auth import keystone
|
||||
... from karborclient.common.apiclient import client
|
||||
... auth = keystone.KeystoneAuthPlugin(
|
||||
... username="user", password="pass", tenant_name="tenant",
|
||||
... auth_url="http://auth:5000/v2.0")
|
||||
... openstack_client = client.HTTPClient(auth)
|
||||
... # create nova client
|
||||
... from novaclient.v1_1 import client
|
||||
... client.Client(openstack_client)
|
||||
... # create keystone client
|
||||
... from keystoneclient.v2_0 import client
|
||||
... client.Client(openstack_client)
|
||||
... # use them
|
||||
... openstack_client.identity.tenants.list()
|
||||
... openstack_client.compute.servers.list()
|
||||
"""
|
||||
service_type = base_client_instance.service_type
|
||||
if service_type and not hasattr(self, service_type):
|
||||
setattr(self, service_type, base_client_instance)
|
||||
|
||||
def authenticate(self):
|
||||
self.auth_plugin.authenticate(self)
|
||||
# Store the authentication results in the keyring for later requests
|
||||
if self.keyring_saver:
|
||||
self.keyring_saver.save(self)
|
||||
|
||||
|
||||
class BaseClient(object):
    """Top-level object through which one OpenStack API is accessed.

    Transport concerns (authentication, endpoint lookup, retries) are
    delegated to the :class:`HTTPClient` given to the constructor; this
    class adds per-service HTTP verb helpers and extension loading.
    """

    service_type = None
    endpoint_type = None  # "publicURL" will be used
    cached_endpoint = None

    def __init__(self, http_client, extensions=None):
        self.http_client = http_client
        http_client.add_client(self)

        # Attach every extension that ships a manager implementation.
        for ext in extensions or ():
            if ext.manager_class:
                setattr(self, ext.name, ext.manager_class(self))

    def client_request(self, method, url, **kwargs):
        """Forward a request to the underlying HTTP client."""
        return self.http_client.client_request(self, method, url, **kwargs)

    def head(self, url, **kwargs):
        return self.client_request("HEAD", url, **kwargs)

    def get(self, url, **kwargs):
        return self.client_request("GET", url, **kwargs)

    def post(self, url, **kwargs):
        return self.client_request("POST", url, **kwargs)

    def put(self, url, **kwargs):
        return self.client_request("PUT", url, **kwargs)

    def delete(self, url, **kwargs):
        return self.client_request("DELETE", url, **kwargs)

    def patch(self, url, **kwargs):
        return self.client_request("PATCH", url, **kwargs)

    @staticmethod
    def get_class(api_name, version, version_map):
        """Returns the client class for the requested API version

        :param api_name: the name of the API, e.g. 'compute', 'image', etc
        :param version: the requested API version
        :param version_map: a dict of client classes keyed by version
        :rtype: a client class for the requested API version
        """
        try:
            client_path = version_map[str(version)]
        except (KeyError, ValueError):
            raise exceptions.UnsupportedVersion(
                _("Invalid %(api_name)s client version '%(version)s'. "
                  "Must be one of: %(version_map)s") % {
                    'api_name': api_name,
                    'version': version,
                    'version_map': ', '.join(version_map.keys())})

        return importutils.import_class(client_path)
|
@ -1,462 +0,0 @@
|
||||
# Copyright 2011 Nebula, Inc.
|
||||
# Copyright 2013 Alessio Ababilov
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Exception definitions.
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import sys
|
||||
|
||||
|
||||
from karborclient.i18n import _
|
||||
|
||||
|
||||
class ClientException(Exception):
    """Root of the exception hierarchy this library raises.

    Catching ``ClientException`` captures every error the client
    raises on its own behalf.
    """
|
||||
|
||||
|
||||
class MissingArgs(ClientException):
    """Raised when a call lacks arguments it requires.

    The missing argument names are kept on the ``missing`` attribute.
    """

    def __init__(self, missing):
        self.missing = missing
        super(MissingArgs, self).__init__(
            _("Missing arguments: %s") % ", ".join(missing))
|
||||
|
||||
|
||||
# Client-side failures that are not tied to an HTTP response.


class ValidationError(ClientException):
    """Error in validation on API client side."""
    pass


class UnsupportedVersion(ClientException):
    """User is trying to use an unsupported version of the API."""
    pass


class CommandError(ClientException):
    """Error in CLI tool."""
    pass


class AuthorizationFailure(ClientException):
    """Cannot authorize API client."""
    pass


class ConnectionRefused(ClientException):
    """Cannot connect to API service."""
    pass
|
||||
|
||||
|
||||
class AuthPluginOptionsMissing(AuthorizationFailure):
    """Raised when the auth plugin is missing some required options.

    The option names are kept on the ``opt_names`` attribute.
    """

    def __init__(self, opt_names):
        super(AuthPluginOptionsMissing, self).__init__(
            _("Authentication failed. Missing options: %s") %
            ", ".join(opt_names))
        self.opt_names = opt_names
|
||||
|
||||
|
||||
class AuthSystemNotFound(AuthorizationFailure):
    """Raised when the requested AuthSystem is not installed.

    The requested system is kept on the ``auth_system`` attribute.
    """

    def __init__(self, auth_system):
        super(AuthSystemNotFound, self).__init__(
            _("AuthSystemNotFound: %s") % repr(auth_system))
        self.auth_system = auth_system
|
||||
|
||||
|
||||
# Lookup / Service Catalog related failures.


class NoUniqueMatch(ClientException):
    """Multiple entities found instead of one."""
    pass


class EndpointException(ClientException):
    """Something is rotten in Service Catalog."""
    pass


class EndpointNotFound(EndpointException):
    """Could not find requested endpoint in Service Catalog."""
    pass
|
||||
|
||||
|
||||
class AmbiguousEndpoints(EndpointException):
    """Raised when several catalog endpoints match one request.

    The matching endpoints are kept on the ``endpoints`` attribute.
    """

    def __init__(self, endpoints=None):
        super(AmbiguousEndpoints, self).__init__(
            _("AmbiguousEndpoints: %s") % repr(endpoints))
        self.endpoints = endpoints
|
||||
|
||||
|
||||
class HttpError(ClientException):
    """Common base for every HTTP-level exception this library raises.

    Subclasses override ``http_status`` and ``message``; instances
    additionally carry the raw response, request id, URL and method so
    callers can inspect what failed.
    """

    http_status = 0
    message = _("HTTP Error")

    def __init__(self, message=None, details=None,
                 response=None, request_id=None,
                 url=None, method=None, http_status=None):
        # Per-instance values win; otherwise fall back to class defaults.
        self.http_status = http_status or self.http_status
        self.message = message or self.message
        self.details = details
        self.request_id = request_id
        self.response = response
        self.url = url
        self.method = method
        text = "%s (HTTP %s)" % (self.message, self.http_status)
        if request_id:
            text = "%s (Request-ID: %s)" % (text, request_id)
        super(HttpError, self).__init__(text)
|
||||
|
||||
|
||||
# Families of HTTP errors; concrete status-code classes subclass one
# of these three depending on their status range.


class HTTPRedirection(HttpError):
    """HTTP Redirection."""
    message = _("HTTP Redirection")


class HTTPClientError(HttpError):
    """Client-side HTTP error.

    Exception for cases in which the client seems to have erred.
    """
    message = _("HTTP Client Error")


class HttpServerError(HttpError):
    """Server-side HTTP error.

    Exception for cases in which the server is aware that it has
    erred or is incapable of performing the request.
    """
    message = _("HTTP Server Error")
|
||||
|
||||
|
||||
# Concrete HTTP status exceptions, 300-412.  Each carries only a fixed
# status code and default message; the shared behavior lives in HttpError.


class MultipleChoices(HTTPRedirection):
    """HTTP 300 - Multiple Choices.

    Indicates multiple options for the resource that the client may follow.
    """

    http_status = 300
    message = _("Multiple Choices")


class BadRequest(HTTPClientError):
    """HTTP 400 - Bad Request.

    The request cannot be fulfilled due to bad syntax.
    """
    http_status = 400
    message = _("Bad Request")


class Unauthorized(HTTPClientError):
    """HTTP 401 - Unauthorized.

    Similar to 403 Forbidden, but specifically for use when authentication
    is required and has failed or has not yet been provided.
    """
    http_status = 401
    message = _("Unauthorized")


class PaymentRequired(HTTPClientError):
    """HTTP 402 - Payment Required.

    Reserved for future use.
    """
    http_status = 402
    message = _("Payment Required")


class Forbidden(HTTPClientError):
    """HTTP 403 - Forbidden.

    The request was a valid request, but the server is refusing to respond
    to it.
    """
    http_status = 403
    message = _("Forbidden")


class NotFound(HTTPClientError):
    """HTTP 404 - Not Found.

    The requested resource could not be found but may be available again
    in the future.
    """
    http_status = 404
    message = _("Not Found")


class MethodNotAllowed(HTTPClientError):
    """HTTP 405 - Method Not Allowed.

    A request was made of a resource using a request method not supported
    by that resource.
    """
    http_status = 405
    message = _("Method Not Allowed")


class NotAcceptable(HTTPClientError):
    """HTTP 406 - Not Acceptable.

    The requested resource is only capable of generating content not
    acceptable according to the Accept headers sent in the request.
    """
    http_status = 406
    message = _("Not Acceptable")


class ProxyAuthenticationRequired(HTTPClientError):
    """HTTP 407 - Proxy Authentication Required.

    The client must first authenticate itself with the proxy.
    """
    http_status = 407
    message = _("Proxy Authentication Required")


class RequestTimeout(HTTPClientError):
    """HTTP 408 - Request Timeout.

    The server timed out waiting for the request.
    """
    http_status = 408
    message = _("Request Timeout")


class Conflict(HTTPClientError):
    """HTTP 409 - Conflict.

    Indicates that the request could not be processed because of conflict
    in the request, such as an edit conflict.
    """
    http_status = 409
    message = _("Conflict")


class Gone(HTTPClientError):
    """HTTP 410 - Gone.

    Indicates that the resource requested is no longer available and will
    not be available again.
    """
    http_status = 410
    message = _("Gone")


class LengthRequired(HTTPClientError):
    """HTTP 411 - Length Required.

    The request did not specify the length of its content, which is
    required by the requested resource.
    """
    http_status = 411
    message = _("Length Required")


class PreconditionFailed(HTTPClientError):
    """HTTP 412 - Precondition Failed.

    The server does not meet one of the preconditions that the requester
    put on the request.
    """
    http_status = 412
    message = _("Precondition Failed")
|
||||
|
||||
|
||||
class RequestEntityTooLarge(HTTPClientError):
    """HTTP 413 - Request Entity Too Large.

    The request is larger than the server is willing or able to process.

    Unlike its siblings this class also captures the server's optional
    ``Retry-After`` hint, exposed as the integer ``retry_after``
    attribute (0 when absent or unparsable).
    """
    http_status = 413
    message = _("Request Entity Too Large")

    def __init__(self, *args, **kwargs):
        try:
            self.retry_after = int(kwargs.pop('retry_after'))
        except (KeyError, TypeError, ValueError):
            # Missing hint (KeyError), a non-numeric string (ValueError),
            # or an unconvertible value such as None (TypeError) all
            # degrade to "no retry hint" instead of raising.
            self.retry_after = 0

        super(RequestEntityTooLarge, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
# Concrete HTTP status exceptions, 414-505.  Each carries only a fixed
# status code and default message; the shared behavior lives in HttpError.


class RequestUriTooLong(HTTPClientError):
    """HTTP 414 - Request-URI Too Long.

    The URI provided was too long for the server to process.
    """
    http_status = 414
    message = _("Request-URI Too Long")


class UnsupportedMediaType(HTTPClientError):
    """HTTP 415 - Unsupported Media Type.

    The request entity has a media type which the server or resource does
    not support.
    """
    http_status = 415
    message = _("Unsupported Media Type")


class RequestedRangeNotSatisfiable(HTTPClientError):
    """HTTP 416 - Requested Range Not Satisfiable.

    The client has asked for a portion of the file, but the server cannot
    supply that portion.
    """
    http_status = 416
    message = _("Requested Range Not Satisfiable")


class ExpectationFailed(HTTPClientError):
    """HTTP 417 - Expectation Failed.

    The server cannot meet the requirements of the Expect request-header field.
    """
    http_status = 417
    message = _("Expectation Failed")


class UnprocessableEntity(HTTPClientError):
    """HTTP 422 - Unprocessable Entity.

    The request was well-formed but was unable to be followed due to semantic
    errors.
    """
    http_status = 422
    message = _("Unprocessable Entity")


class InternalServerError(HttpServerError):
    """HTTP 500 - Internal Server Error.

    A generic error message, given when no more specific message is suitable.
    """
    http_status = 500
    message = _("Internal Server Error")


# NotImplemented is a python keyword.
class HttpNotImplemented(HttpServerError):
    """HTTP 501 - Not Implemented.

    The server either does not recognize the request method, or it lacks
    the ability to fulfill the request.
    """
    http_status = 501
    message = _("Not Implemented")


class BadGateway(HttpServerError):
    """HTTP 502 - Bad Gateway.

    The server was acting as a gateway or proxy and received an invalid
    response from the upstream server.
    """
    http_status = 502
    message = _("Bad Gateway")


class ServiceUnavailable(HttpServerError):
    """HTTP 503 - Service Unavailable.

    The server is currently unavailable.
    """
    http_status = 503
    message = _("Service Unavailable")


class GatewayTimeout(HttpServerError):
    """HTTP 504 - Gateway Timeout.

    The server was acting as a gateway or proxy and did not receive a timely
    response from the upstream server.
    """
    http_status = 504
    message = _("Gateway Timeout")


class HttpVersionNotSupported(HttpServerError):
    """HTTP 505 - HttpVersion Not Supported.

    The server does not support the HTTP protocol version used in the request.
    """
    http_status = 505
    message = _("HTTP Version Not Supported")
|
||||
|
||||
|
||||
# _code_map contains all the classes that have http_status attribute.
|
||||
_code_map = dict(
|
||||
(getattr(obj, 'http_status', None), obj)
|
||||
for name, obj in vars(sys.modules[__name__]).items()
|
||||
if inspect.isclass(obj) and getattr(obj, 'http_status', False)
|
||||
)
|
||||
|
||||
|
||||
def from_response(response, method, url):
    """Return the most specific :class:`HttpError` for *response*.

    :param response: instance of `requests.Response` class
    :param method: HTTP method used for request
    :param url: URL used for request
    """
    req_id = response.headers.get("x-openstack-request-id")
    # NOTE(hdd) true for older versions of nova and cinder
    if not req_id:
        req_id = response.headers.get("x-compute-request-id")

    kwargs = {
        "http_status": response.status_code,
        "response": response,
        "method": method,
        "url": url,
        "request_id": req_id,
    }
    if "retry-after" in response.headers:
        kwargs["retry_after"] = response.headers["retry-after"]

    # Pull a message/details pair out of the payload when possible.
    content_type = response.headers.get("Content-Type", "")
    if content_type.startswith("application/json"):
        try:
            body = response.json()
        except ValueError:
            pass
        else:
            if isinstance(body, dict):
                error = list(body.values())[0]
                kwargs["message"] = error.get("message")
                kwargs["details"] = error.get("details")
    elif content_type.startswith("text/"):
        kwargs["details"] = response.text

    # Exact status match first, then fall back to the error family.
    cls = _code_map.get(response.status_code)
    if cls is None:
        if 500 <= response.status_code < 600:
            cls = HttpServerError
        elif 400 <= response.status_code < 500:
            cls = HTTPClientError
        else:
            cls = HttpError
    return cls(**kwargs)
|
@ -1,177 +0,0 @@
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
A fake server that "responds" to API methods with pre-canned responses.
|
||||
|
||||
All of these responses come from the spec, so if for some reason the spec's
|
||||
wrong the tests might raise AssertionError. I've indicated in comments the
|
||||
places where actual behavior differs from the spec.
|
||||
"""
|
||||
|
||||
# W0102: Dangerous default value %s as argument
|
||||
# pylint: disable=W0102
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
import requests
|
||||
import six
|
||||
from six.moves.urllib import parse
|
||||
|
||||
from karborclient.common.apiclient import client
|
||||
|
||||
|
||||
def assert_has_keys(dct, required=None, optional=None):
    """Assert that *dct* contains every key listed in *required*.

    The check only fails when a required key is absent.  Keys outside
    ``required + optional`` never fail the check on their own, but are
    included in the failure message to help diagnose typos.

    :param dct: mapping under test
    :param required: keys that must be present
    :param optional: keys that may be present (used for diagnostics only)
    :raises AssertionError: if any required key is missing
    """
    if required is None:
        required = []
    if optional is None:
        optional = []
    missing = [k for k in required if k not in dct]
    if missing:
        extra_keys = set(dct.keys()).difference(set(required + optional))
        # Name the keys that actually caused the failure, not only the
        # unexpected ones (the old message claimed "unexpected keys"
        # even when the real problem was a missing required key).
        raise AssertionError("missing required keys: %s; "
                             "found unexpected keys: %s" %
                             (missing, list(extra_keys)))
|
||||
|
||||
|
||||
class TestResponse(requests.Response):
    """Wrap requests.Response and provide a convenient initialization.

    Accepts either a bare integer status code, or a dict of the form
    ``{"status_code": int, "text": str|dict|list, "headers": dict}``;
    dict/list bodies are JSON-encoded and tagged with a JSON
    Content-Type header by default.
    """

    def __init__(self, data):
        super(TestResponse, self).__init__()
        # NOTE(review): presumably marks the body as already read so
        # requests serves .text/.json() straight from _content -- confirm
        # against the requests.Response implementation.
        self._content_consumed = True
        if isinstance(data, dict):
            self.status_code = data.get('status_code', 200)
            # Fake the text attribute to streamline Response creation
            text = data.get('text', "")
            if isinstance(text, (dict, list)):
                # Structured bodies are serialized and advertised as JSON.
                self._content = jsonutils.dumps(text)
                default_headers = {
                    "Content-Type": "application/json",
                }
            else:
                self._content = text
                default_headers = {}
            # requests on Python 3 stores content as bytes.
            if six.PY3 and isinstance(self._content, six.string_types):
                self._content = self._content.encode('utf-8', 'strict')
            self.headers = data.get('headers') or default_headers
        else:
            # A plain int means "status only" -- no body, no headers.
            self.status_code = data

    def __eq__(self, other):
        # Two fake responses are equal when status, headers and raw body
        # all match.
        return (self.status_code == other.status_code and
                self.headers == other.headers and
                self._content == other._content)

    def __ne__(self, other):
        return not self.__eq__(other)
|
||||
|
||||
|
||||
class FakeHTTPClient(client.HTTPClient):
    """HTTPClient double that records calls and serves canned responses.

    Responses come either from the ``fixtures`` mapping
    (``{url: {method: (headers, body)}}``) or from ``<verb>_<munged_url>``
    methods defined on subclasses.  Every request is appended to
    ``self.callstack`` as ``(method, url, headers, body)`` so tests can
    assert on the traffic afterwards.
    """

    def __init__(self, *args, **kwargs):
        self.callstack = []
        self.fixtures = kwargs.pop("fixtures", None) or {}
        # The real HTTPClient wants an auth plugin positional; default it
        # to None so tests can build the fake with no arguments.
        if not args and "auth_plugin" not in kwargs:
            args = (None, )
        super(FakeHTTPClient, self).__init__(*args, **kwargs)

    def assert_called(self, method, url, body=None, pos=-1):
        """Assert that an API method was just called."""
        expected = (method, url)
        # Check emptiness BEFORE indexing: previously the callstack was
        # indexed first, so an empty callstack raised IndexError instead
        # of this intended assertion message.
        assert self.callstack, \
            "Expected %s %s but no calls were made." % expected
        called = self.callstack[pos][0:2]

        assert expected == called, 'Expected %s %s; got %s %s' % \
            (expected + called)

        if body is not None:
            if self.callstack[pos][3] != body:
                raise AssertionError('%r != %r' %
                                     (self.callstack[pos][3], body))

    def assert_called_anytime(self, method, url, body=None):
        """Assert that an API method was called anytime in the test."""
        expected = (method, url)

        assert self.callstack, \
            "Expected %s %s but no calls were made." % expected

        found = False
        entry = None
        for entry in self.callstack:
            if expected == entry[0:2]:
                found = True
                break

        assert found, 'Expected %s %s; got %s' % \
            (method, url, self.callstack)
        if body is not None:
            assert entry[3] == body, "%s != %s" % (entry[3], body)

        # A successful anytime-assertion consumes the recorded history.
        self.callstack = []

    def clear_callstack(self):
        """Drop any recorded calls."""
        self.callstack = []

    def authenticate(self):
        """No-op: the fake never authenticates."""
        pass

    def client_request(self, client, method, url, **kwargs):
        """Record the call and answer from fixtures or fake methods."""
        # Check that certain things are called correctly
        if method in ["GET", "DELETE"]:
            assert "json" not in kwargs

        # Note the call
        self.callstack.append(
            (method,
             url,
             kwargs.get("headers") or {},
             kwargs.get("json") or kwargs.get("data")))

        # Fixtures win over fake callback methods.
        try:
            fixture = self.fixtures[url][method]
        except KeyError:
            pass
        else:
            return TestResponse({"headers": fixture[0],
                                 "text": fixture[1]})

        # Call the method: query args become kwargs, the path is munged
        # into a python identifier, e.g. GET /v1/plans -> get_v1_plans.
        args = parse.parse_qsl(parse.urlparse(url)[4])
        kwargs.update(args)
        munged_url = url.rsplit('?', 1)[0]
        munged_url = munged_url.strip('/').replace('/', '_').replace('.', '_')
        munged_url = munged_url.replace('-', '_')

        callback = "%s_%s" % (method.lower(), munged_url)

        if not hasattr(self, callback):
            raise AssertionError('Called unknown API method: %s %s, '
                                 'expected fakes method name: %s' %
                                 (method, url, callback))

        # Callbacks return (status, body) or (status, headers, body).
        resp = getattr(self, callback)(**kwargs)
        if len(resp) == 3:
            status, headers, body = resp
        else:
            status, body = resp
            headers = {}
        return TestResponse({
            "status_code": status,
            "text": body,
            "headers": headers,
        })
|
@ -1,344 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Base utilities to build API operation managers and objects on top of.
|
||||
"""
|
||||
|
||||
import abc
|
||||
import copy
|
||||
|
||||
import six
|
||||
from six.moves.urllib import parse
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
from karborclient.common import http
|
||||
|
||||
|
||||
SORT_DIR_VALUES = ('asc', 'desc')
|
||||
SORT_KEY_VALUES = ('id', 'status', 'name', 'created_at')
|
||||
SORT_KEY_MAPPINGS = {}
|
||||
|
||||
|
||||
def getid(obj):
    """Return ``obj.id`` when present, otherwise ``obj`` itself.

    Abstracts the common pattern of allowing both an object or an
    object's ID (UUID) as a parameter when dealing with relationships.
    """
    return getattr(obj, 'id', obj)
|
||||
|
||||
|
||||
class Manager(object):
    """Interacts with one type of API resource (plans, checkpoints, ...)
    and provides CRUD operations for it.

    Subclasses set :attr:`resource_class` to the :class:`Resource`
    subclass their endpoints return.
    """
    # Resource subclass built from response bodies; set by subclasses.
    resource_class = None

    def __init__(self, api):
        self.api = api
        # SessionClient exposes the project id via a method; the legacy
        # HTTP client stores it as a plain attribute.
        if isinstance(self.api, http.SessionClient):
            self.project_id = self.api.get_project_id()
        else:
            self.project_id = self.api.project_id

    def _list(self, url, response_key=None, obj_class=None,
              data=None, headers=None, return_raw=False,):
        """GET *url* and return a list of resources (or raw data).

        :param response_key: key in the body holding the list; missing
            keys are treated as an empty list
        :param obj_class: resource class override (default
            ``self.resource_class``)
        :param data: NOTE(review): never read -- it is overwritten below;
            kept only for signature compatibility
        :param return_raw: return the decoded payload instead of
            resource objects
        """
        if headers is None:
            headers = {}
        resp, body = self.api.json_request('GET', url, headers=headers)

        if obj_class is None:
            obj_class = self.resource_class

        if response_key:
            if response_key not in body:
                body[response_key] = []
            data = body[response_key]
        else:
            data = body
        if return_raw:
            return data
        # Falsy entries (e.g. None) are silently dropped.
        return [obj_class(self, res, loaded=True) for res in data if res]

    def _delete(self, url, headers=None):
        """DELETE *url*; no return value."""
        if headers is None:
            headers = {}
        self.api.raw_request('DELETE', url, headers=headers)

    def _update(self, url, data, response_key=None, headers=None):
        """PUT *data* to *url* and return the updated resource, if any."""
        if headers is None:
            headers = {}
        resp, body = self.api.json_request('PUT', url, data=data,
                                           headers=headers)
        # PUT requests may not return a body
        if body:
            if response_key:
                return self.resource_class(self, body[response_key])
            return self.resource_class(self, body)

    def _create(self, url, data=None, response_key=None,
                return_raw=False, headers=None):
        """POST to *url* and return the created resource (or raw body)."""
        if headers is None:
            headers = {}
        if data:
            resp, body = self.api.json_request('POST', url,
                                               data=data, headers=headers)
        else:
            resp, body = self.api.json_request('POST', url, headers=headers)
        if return_raw:
            if response_key:
                return body[response_key]
            return body
        if response_key:
            return self.resource_class(self, body[response_key])
        return self.resource_class(self, body)

    def _get(self, url, response_key=None, return_raw=False, headers=None):
        """GET *url* and return a single resource (or raw body)."""
        if headers is None:
            headers = {}
        resp, body = self.api.json_request('GET', url, headers=headers)
        if return_raw:
            if response_key:
                return body[response_key]
            return body
        if response_key:
            return self.resource_class(self, body[response_key])
        return self.resource_class(self, body)

    def _build_list_url(self, resource_type, detailed=False,
                        search_opts=None, marker=None, limit=None,
                        sort_key=None, sort_dir=None, sort=None):
        """Build ``/<resource_type>[/detail]?<query>`` for list calls.

        Falsy search_opts values are dropped; ``sort`` (preferred) and
        the deprecated ``sort_key``/``sort_dir`` pair are validated by
        the ``_format_sort_*`` helpers.
        """
        if search_opts is None:
            search_opts = {}

        query_params = {}
        for key, val in search_opts.items():
            if val:
                query_params[key] = val

        if marker:
            query_params['marker'] = marker

        if limit:
            query_params['limit'] = limit

        if sort:
            query_params['sort'] = self._format_sort_param(sort)
        else:
            # sort_key and sort_dir deprecated in kilo, prefer sort
            if sort_key:
                query_params['sort_key'] = self._format_sort_key_param(
                    sort_key)

            if sort_dir:
                query_params['sort_dir'] = self._format_sort_dir_param(
                    sort_dir)

        # Transform the dict to a sequence of two-element tuples in fixed
        # order, then the encoded string will be consistent in Python 2&3.
        query_string = ""
        if query_params:
            params = sorted(query_params.items(), key=lambda x: x[0])
            query_string = "?%s" % parse.urlencode(params)

        detail = ""
        if detailed:
            detail = "/detail"

        return ("/%(resource_type)s%(detail)s"
                "%(query_string)s" %
                {"resource_type": resource_type, "detail": detail,
                 "query_string": query_string})

    def _format_sort_param(self, sort):
        '''Formats the sort information into the sort query string parameter.

        The input sort information can be any of the following:
        - Comma-separated string in the form of <key[:dir]>
        - List of strings in the form of <key[:dir]>
        - List of either string keys, or tuples of (key, dir)

        For example, the following import sort values are valid:
        - 'key1:dir1,key2,key3:dir3'
        - ['key1:dir1', 'key2', 'key3:dir3']
        - [('key1', 'dir1'), 'key2', ('key3', 'dir3')]

        :param sort: Input sort information
        :returns: Formatted query string parameter or None
        :raise ValueError: If an invalid sort direction or invalid sort key is
                           given
        '''
        if not sort:
            return None

        if isinstance(sort, six.string_types):
            # Convert the string into a list for consistent validation
            sort = [s for s in sort.split(',') if s]

        sort_array = []
        for sort_item in sort:
            if isinstance(sort_item, tuple):
                sort_key = sort_item[0]
                sort_dir = sort_item[1]
            else:
                sort_key, _sep, sort_dir = sort_item.partition(':')
            sort_key = sort_key.strip()
            if sort_key in SORT_KEY_VALUES:
                # Translate the public key name to its server-side alias.
                sort_key = SORT_KEY_MAPPINGS.get(sort_key, sort_key)
            else:
                raise ValueError('sort_key must be one of the following: %s.'
                                 % ', '.join(SORT_KEY_VALUES))
            if sort_dir:
                sort_dir = sort_dir.strip()
                if sort_dir not in SORT_DIR_VALUES:
                    msg = ('sort_dir must be one of the following: %s.'
                           % ', '.join(SORT_DIR_VALUES))
                    raise ValueError(msg)
                sort_array.append('%s:%s' % (sort_key, sort_dir))
            else:
                sort_array.append(sort_key)
        return ','.join(sort_array)

    def _format_sort_key_param(self, sort_key):
        """Validate *sort_key* and map it to its server-side alias."""
        if sort_key in SORT_KEY_VALUES:
            return SORT_KEY_MAPPINGS.get(sort_key, sort_key)

        msg = ('sort_key must be one of the following: %s.' %
               ', '.join(SORT_KEY_VALUES))
        raise ValueError(msg)

    def _format_sort_dir_param(self, sort_dir):
        """Validate that *sort_dir* is 'asc' or 'desc'."""
        if sort_dir in SORT_DIR_VALUES:
            return sort_dir

        msg = ('sort_dir must be one of the following: %s.'
               % ', '.join(SORT_DIR_VALUES))
        raise ValueError(msg)
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
class ManagerWithFind(Manager):
    """Manager with additional `find()`/`findall()` methods."""

    @abc.abstractmethod
    def list(self):
        pass

    def find(self, **kwargs):
        """Return the single resource whose attributes match ``**kwargs``.

        This isn't very efficient: it loads the entire list then filters
        on the Python side.

        :raises exceptions.NotFound: when nothing matches
        :raises exceptions.NoUniqueMatch: when several resources match
        """
        matches = self.findall(**kwargs)
        if not matches:
            raise exceptions.NotFound(
                "No %s matching %s." % (self.resource_class.__name__, kwargs))
        if len(matches) > 1:
            raise exceptions.NoUniqueMatch
        # Re-fetch so the caller gets a fully populated resource.
        return self.get(matches[0].id)

    def findall(self, **kwargs):
        """Return every resource whose attributes match ``**kwargs``.

        This isn't very efficient: it loads the entire list then filters
        on the Python side.
        """
        wanted = list(kwargs.items())
        matches = []
        for candidate in self.list():
            try:
                if all(getattr(candidate, attr) == value
                       for attr, value in wanted):
                    matches.append(candidate)
            except AttributeError:
                # Resources lacking one of the attributes simply don't match.
                continue

        return matches
|
||||
|
||||
|
||||
class Resource(object):
    """One particular instance of an API object (tenant, user, etc.).

    Essentially a bag for the attributes found in *info*.

    :param manager: Manager object
    :param info: dictionary representing resource attributes
    :param loaded: prevent lazy-loading if set to True
    """

    def __init__(self, manager, info, loaded=False):
        self.manager = manager
        self._info = info
        self._add_details(info)
        self._loaded = loaded

    def _add_details(self, info):
        # Mirror every entry of *info* as an instance attribute.
        for key, value in info.items():
            setattr(self, key, value)

    def __setstate__(self, state):
        for key, value in state.items():
            setattr(self, key, value)

    def __getattr__(self, name):
        if name in self.__dict__:
            return self.__dict__[name]
        # Unknown attribute: lazily fetch the full resource once, then
        # retry.  A miss after loading is a genuine AttributeError
        # (lazy-loading is disallowed once loaded).
        if self.is_loaded():
            raise AttributeError(name)
        self.get()
        return self.__getattr__(name)

    def __repr__(self):
        visible = sorted(k for k in self.__dict__.keys()
                         if k[0] != '_' and k != 'manager')
        attrs = ", ".join("%s=%s" % (k, getattr(self, k)) for k in visible)
        return "<%s %s>" % (self.__class__.__name__, attrs)

    def get(self):
        """Refresh this resource's attributes from the API."""
        # set_loaded() first ... so if we have to bail, we know we tried.
        self.set_loaded(True)
        if not hasattr(self.manager, 'get'):
            return

        fresh = self.manager.get(self.id)
        if fresh:
            self._add_details(fresh._info)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self._info == other._info

    def __ne__(self, other):
        return not self.__eq__(other)

    def is_loaded(self):
        return self._loaded

    def set_loaded(self, val):
        self._loaded = val

    def to_dict(self):
        return copy.deepcopy(self._info)
|
@ -1,394 +0,0 @@
|
||||
# Copyright 2012 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import hashlib
|
||||
import os
|
||||
import socket
|
||||
|
||||
import keystoneauth1.adapter as keystone_adapter
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import encodeutils
|
||||
import requests
|
||||
import six
|
||||
from six.moves import urllib
|
||||
|
||||
from karborclient.common.apiclient import exceptions as exc
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
USER_AGENT = 'python-karborclient'
|
||||
CHUNKSIZE = 1024 * 64 # 64kB
|
||||
|
||||
|
||||
def get_system_ca_file():
    """Return path to system default CA file."""
    # Well-known CA bundle locations: Debian/Ubuntu, RedHat/Fedora, Suse,
    # FreeBSD/OpenBSD, MacOSX, and finally the bundle shipped with requests.
    candidates = (
        '/etc/ssl/certs/ca-certificates.crt',
        '/etc/pki/tls/certs/ca-bundle.crt',
        '/etc/ssl/ca-bundle.pem',
        '/etc/ssl/cert.pem',
        '/System/Library/OpenSSL/certs/cacert.pem',
        requests.certs.where(),
    )
    for candidate in candidates:
        LOG.debug("Looking for ca file %s", candidate)
        if os.path.exists(candidate):
            LOG.debug("Using ca file %s", candidate)
            return candidate
    # Implicitly returns None when no bundle exists anywhere.
    LOG.warning("System ca file could not be found.")
|
||||
|
||||
|
||||
class HTTPClient(object):
    """Standalone (non-keystoneauth) HTTP client for the karbor API."""

    def __init__(self, endpoint, **kwargs):
        self.endpoint = endpoint
        self.auth_url = kwargs.get('auth_url')
        self.auth_token = kwargs.get('token')
        self.username = kwargs.get('username')
        self.password = kwargs.get('password')
        self.region_name = kwargs.get('region_name')
        self.include_pass = kwargs.get('include_pass')
        self.project_id = kwargs.get('project_id')
        self.endpoint_url = endpoint

        self.cert_file = kwargs.get('cert_file')
        self.key_file = kwargs.get('key_file')
        self.timeout = kwargs.get('timeout')

        # Remembered only so log_curl_request can echo the TLS flags.
        self.ssl_connection_params = {
            'cacert': kwargs.get('cacert'),
            'cert_file': kwargs.get('cert_file'),
            'key_file': kwargs.get('key_file'),
            'insecure': kwargs.get('insecure'),
        }

        # verify_cert feeds requests' ``verify=``: False disables
        # verification, a path selects the CA bundle, None means "not https".
        self.verify_cert = None
        if urllib.parse.urlparse(endpoint).scheme == "https":
            if kwargs.get('insecure'):
                self.verify_cert = False
            else:
                self.verify_cert = kwargs.get('cacert', get_system_ca_file())

    def _safe_header(self, name, value):
        # Redact token headers in logs: only a SHA1 digest is emitted.
        if name in ['X-Auth-Token', 'X-Subject-Token']:
            # because in python3 byte string handling is ... ug
            v = value.encode('utf-8')
            h = hashlib.sha1(v)
            d = h.hexdigest()
            return encodeutils.safe_decode(name), "{SHA1}%s" % d
        else:
            return (encodeutils.safe_decode(name),
                    encodeutils.safe_decode(value))

    def log_curl_request(self, method, url, kwargs):
        """Debug-log the request as an equivalent curl command line."""
        curl = ['curl -i -X %s' % method]

        for (key, value) in kwargs['headers'].items():
            header = '-H \'%s: %s\'' % self._safe_header(key, value)
            curl.append(header)

        conn_params_fmt = [
            ('key_file', '--key %s'),
            ('cert_file', '--cert %s'),
            ('cacert', '--cacert %s'),
        ]
        for (key, fmt) in conn_params_fmt:
            value = self.ssl_connection_params.get(key)
            if value:
                curl.append(fmt % value)

        if self.ssl_connection_params.get('insecure'):
            curl.append('-k')

        if 'data' in kwargs:
            curl.append('-d \'%s\'' % kwargs['data'])

        curl.append('%s%s' % (self.endpoint, url))
        LOG.debug(' '.join(curl))

    @staticmethod
    def log_http_response(resp):
        """Debug-log status line, headers and (text) body of a response."""
        status = (resp.raw.version / 10.0, resp.status_code, resp.reason)
        dump = ['\nHTTP/%.1f %s %s' % status]
        dump.extend(['%s: %s' % (k, v) for k, v in resp.headers.items()])
        dump.append('')
        if resp.content:
            content = resp.content
            if isinstance(content, six.binary_type):
                try:
                    content = encodeutils.safe_decode(resp.content)
                except UnicodeDecodeError:
                    pass
            else:
                # NOTE(review): only non-bytes content reaches the dump; a
                # successfully decoded bytes body is discarded, so on
                # Python 3 the body is never logged -- confirm intent.
                dump.extend([content, ''])
        LOG.debug('\n'.join(dump))

    def _http_request(self, url, method, **kwargs):
        """Send an http request with the specified characteristics.

        Wrapper around requests.request to handle tasks such
        as setting headers and error handling.
        """
        # Copy the kwargs so we can reuse the original in case of redirects
        kwargs['headers'] = copy.deepcopy(kwargs.get('headers', {}))
        kwargs['headers'].setdefault('User-Agent', USER_AGENT)
        if self.auth_token:
            kwargs['headers'].setdefault('X-Auth-Token', self.auth_token)
        else:
            # No token: fall back to username/password headers.
            kwargs['headers'].update(self.credentials_headers())
        if self.auth_url:
            kwargs['headers'].setdefault('X-Auth-Url', self.auth_url)
        if self.region_name:
            kwargs['headers'].setdefault('X-Region-Name', self.region_name)

        self.log_curl_request(method, url, kwargs)

        if self.cert_file and self.key_file:
            kwargs['cert'] = (self.cert_file, self.key_file)

        if self.verify_cert is not None:
            kwargs['verify'] = self.verify_cert

        if self.timeout is not None:
            kwargs['timeout'] = float(self.timeout)

        # Allow the option not to follow redirects
        follow_redirects = kwargs.pop('follow_redirects', True)

        # Since requests does not follow the RFC when doing redirection to sent
        # back the same method on a redirect we are simply bypassing it. For
        # example if we do a DELETE/POST/PUT on a URL and we get a 302 RFC says
        # that we should follow that URL with the same method as before,
        # requests doesn't follow that and send a GET instead for the method.
        # Hopefully this could be fixed as they say in a comment in a future
        # point version i.e.: 3.x
        # See issue: https://github.com/kennethreitz/requests/issues/1704
        allow_redirects = False

        try:
            resp = requests.request(
                method,
                self.endpoint_url + url,
                allow_redirects=allow_redirects,
                **kwargs)
        except socket.gaierror as e:
            # DNS failure -> distinct exception so callers can report it.
            message = ("Error finding address for %(url)s: %(e)s" %
                       {'url': self.endpoint_url + url, 'e': e})
            raise exc.EndpointException(message)
        except (socket.error,
                socket.timeout,
                requests.exceptions.ConnectionError) as e:
            endpoint = self.endpoint
            message = ("Error communicating with %(endpoint)s %(e)s" %
                       {'endpoint': endpoint, 'e': e})
            raise exc.ConnectionRefused(message)

        self.log_http_response(resp)

        # NOTE(review): ``"(HTTP 401)" in resp.content`` mixes str and bytes
        # on Python 3 (resp.content is bytes) -- confirm this branch still
        # behaves as intended there.
        if 'X-Auth-Key' not in kwargs['headers'] and \
                (resp.status_code == 401 or
                 (resp.status_code == 500 and "(HTTP 401)" in resp.content)):
            raise exc.AuthorizationFailure("Authentication failed. Please try"
                                           " again.\n%s"
                                           % resp.content)
        elif 400 <= resp.status_code < 600:
            raise exc.from_response(resp, method, url)
        elif resp.status_code in (301, 302, 305):
            # Redirected. Reissue the request to the new location,
            # unless caller specified follow_redirects=False
            if follow_redirects:
                location = resp.headers.get('location')
                path = self.strip_endpoint(location)
                resp = self._http_request(path, method, **kwargs)
        elif resp.status_code == 300:
            raise exc.from_response(resp, method, url)

        return resp

    def strip_endpoint(self, location):
        """Reduce a redirect Location to a path on our own endpoint.

        Raises EndpointException for a missing header or a redirect that
        points outside the configured endpoint.
        """
        if location is None:
            message = "Location not returned with 302"
            raise exc.EndpointException(message)
        elif location.startswith(self.endpoint):
            return location[len(self.endpoint):]
        else:
            message = "Prohibited endpoint redirect %s" % location
            raise exc.EndpointException(message)

    def credentials_headers(self):
        """Return username/password auth headers (used when no token)."""
        creds = {}
        if self.username:
            creds['X-Auth-User'] = self.username
        if self.password:
            creds['X-Auth-Key'] = self.password
        return creds

    def json_request(self, method, url, content_type='application/json',
                     **kwargs):
        """Issue a request with a JSON-serialized payload.

        Returns (response, body) where body is the decoded JSON dict when
        the response declares application/json, otherwise None.
        """
        kwargs.setdefault('headers', {})
        kwargs['headers'].setdefault('Content-Type', content_type)
        # Don't set Accept because we aren't always dealing in JSON

        if 'body' in kwargs:
            if 'data' in kwargs:
                raise ValueError("Can't provide both 'data' and "
                                 "'body' to a request")
            LOG.warning("Use of 'body' is deprecated; use 'data' instead")
            kwargs['data'] = kwargs.pop('body')
        if 'data' in kwargs:
            kwargs['data'] = jsonutils.dumps(kwargs['data'])

        resp = self._http_request(url, method, **kwargs)
        body = resp.content

        if body and 'application/json' in resp.headers['content-type']:
            try:
                body = resp.json()
            except ValueError:
                LOG.error('Could not decode response body as JSON')
        else:
            body = None

        return resp, body

    def raw_request(self, method, url, **kwargs):
        """Issue a request without JSON-encoding the payload."""
        if 'body' in kwargs:
            if 'data' in kwargs:
                raise ValueError("Can't provide both 'data' and "
                                 "'body' to a request")
            LOG.warning("Use of 'body' is deprecated; use 'data' instead")
            kwargs['data'] = kwargs.pop('body')
        # Chunking happens automatically if 'body' is a
        # file-like object
        return self._http_request(url, method, **kwargs)

    def client_request(self, method, url, **kwargs):
        # Convenience wrapper: JSON request, but only the response object
        # is returned (the decoded body is dropped).
        resp, body = self.json_request(method, url, **kwargs)
        return resp

    def head(self, url, **kwargs):
        return self.client_request("HEAD", url, **kwargs)

    def get(self, url, **kwargs):
        return self.client_request("GET", url, **kwargs)

    def post(self, url, **kwargs):
        return self.client_request("POST", url, **kwargs)

    def put(self, url, **kwargs):
        return self.client_request("PUT", url, **kwargs)

    def delete(self, url, **kwargs):
        # DELETE goes through raw_request: no JSON content-type forced.
        return self.raw_request("DELETE", url, **kwargs)

    def patch(self, url, **kwargs):
        return self.client_request("PATCH", url, **kwargs)
|
||||
|
||||
|
||||
class SessionClient(keystone_adapter.Adapter):
    """karbor specific keystoneauth Adapter.

    Unlike the plain Adapter, request() returns ``(response, text)`` and
    errors are raised as karborclient exceptions.
    """

    def request(self, url, method, **kwargs):
        # Disable keystoneauth's own raising so we can translate >=400
        # responses into karborclient exception types instead.
        raise_exc = kwargs.pop('raise_exc', True)
        resp = super(SessionClient, self).request(url,
                                                  method,
                                                  raise_exc=False,
                                                  **kwargs)

        if raise_exc and resp.status_code >= 400:
            # NOTE(review): exc.from_response() is constructed twice here
            # (once for the log line, once for the raise) -- confirm that is
            # harmless for the exception type in use.
            LOG.trace("Error communicating with {url}: {exc}"
                      .format(url=url,
                              exc=exc.from_response(resp, method, url)))
            raise exc.from_response(resp, method, url)

        return resp, resp.text

    def json_request(self, method, url, **kwargs):
        """Issue a request with a JSON payload; decode a JSON response.

        Returns (response, body) where body is the parsed JSON when the
        response text parses, otherwise the raw text.
        """
        headers = kwargs.setdefault('headers', {})
        headers['Content-Type'] = kwargs.pop('content_type',
                                             'application/json')
        if 'body' in kwargs:
            if 'data' in kwargs:
                raise ValueError("Can't provide both 'data' and "
                                 "'body' to a request")
            LOG.warning("Use of 'body' is deprecated; use 'data' instead")
            kwargs['data'] = kwargs.pop('body')
        if 'data' in kwargs:
            kwargs['data'] = jsonutils.dumps(kwargs['data'])
            # NOTE(starodubcevna): We need to prove that json field is empty,
            # or it will be modified by keystone adapter.
            kwargs['json'] = None

        resp, body = self.request(url, method, **kwargs)
        if body:
            try:
                body = jsonutils.loads(body)
            except ValueError:
                # Not JSON; hand back the raw text unchanged.
                pass
        return resp, body

    def raw_request(self, method, url, **kwargs):
        """Issue a request without JSON handling; returns the response only."""
        # A non-json request; instead of calling
        # super.request, need to call the grandparent
        # adapter.request
        raise_exc = kwargs.pop('raise_exc', True)
        if 'body' in kwargs:
            if 'data' in kwargs:
                raise ValueError("Can't provide both 'data' and "
                                 "'body' to a request")
            LOG.warning("Use of 'body' is deprecated; use 'data' instead")
            kwargs['data'] = kwargs.pop('body')
        resp = keystone_adapter.Adapter.request(self,
                                                url,
                                                method,
                                                raise_exc=False,
                                                **kwargs)

        if raise_exc and resp.status_code >= 400:
            LOG.trace("Error communicating with {url}: {exc}"
                      .format(url=url,
                              exc=exc.from_response(resp, method, url)))
            raise exc.from_response(resp, method, url)

        return resp
|
||||
|
||||
|
||||
def _construct_http_client(*args, **kwargs):
    """Build either a keystoneauth SessionClient or a legacy HTTPClient.

    A SessionClient is returned when a keystoneauth ``session`` is supplied;
    otherwise the standalone HTTPClient is constructed from the positional
    endpoint and remaining keyword arguments.
    """
    session = kwargs.pop('session', None)
    auth = kwargs.pop('auth', None)
    endpoint = next(iter(args), None)

    if not session:
        # Legacy path: token/credential driven client.
        return HTTPClient(*args, **kwargs)

    parameters = {
        'endpoint_override': endpoint,
        'session': session,
        'auth': auth,
        'interface': kwargs.pop('endpoint_type', None),
        'service_type': kwargs.pop('service_type', None),
        'region_name': kwargs.pop('region_name', None),
        'service_name': kwargs.pop('service_name', None),
        'user_agent': 'python-karborclient',
    }
    # Remaining keyword arguments are forwarded verbatim to the adapter.
    parameters.update(kwargs)
    return SessionClient(**parameters)
|
@ -1,176 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import six
|
||||
import uuid
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import encodeutils
|
||||
|
||||
import prettytable
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
|
||||
|
||||
# Decorator for cli-args
|
||||
def arg(*args, **kwargs):
    """Decorator that records argparse option specs on a CLI function.

    Each application prepends ``(args, kwargs)`` to the function's
    ``arguments`` attribute.
    """
    def _decorator(func):
        # Decorators apply bottom-up, so insert at the front to keep the
        # declared (top-down) ordering of the options.
        arguments = func.__dict__.setdefault('arguments', [])
        arguments.insert(0, (args, kwargs))
        return func
    return _decorator
|
||||
|
||||
|
||||
def env(*vars, **kwargs):
    """Search for the first defined of possibly many env vars

    Returns the first environment variable defined in vars, or
    returns the default defined in kwargs.
    """
    for name in vars:
        # Empty-string values are treated the same as unset variables.
        current = os.environ.get(name)
        if current:
            return current
    return kwargs.get('default', '')
|
||||
|
||||
|
||||
def _print(pt, order):
|
||||
if sys.version_info >= (3, 0):
|
||||
print(pt.get_string(sortby=order))
|
||||
else:
|
||||
print(encodeutils.safe_encode(pt.get_string(sortby=order)))
|
||||
|
||||
|
||||
def print_list(objs, fields, exclude_unavailable=False, formatters=None,
               sortby_index=0):
    '''Prints a list of objects.

    @param objs: Objects to print
    @param fields: Fields on each object to be printed
    @param exclude_unavailable: Boolean to decide if unavailable fields are
                                removed
    @param formatters: Custom field formatters
    @param sortby_index: Results sorted against the key in the fields list at
                         this index; if None then the object order is not
                         altered
    '''
    formatters = formatters or {}
    # Fields whose attribute name keeps its original case (only spaces
    # become underscores); all others are lower-cased as well.
    mixed_case_fields = ['serverId']
    removed_fields = []
    rows = []

    for o in objs:
        row = []
        for field in fields:
            if field in removed_fields:
                continue
            if field in formatters:
                row.append(formatters[field](o))
            else:
                if field in mixed_case_fields:
                    field_name = field.replace(' ', '_')
                else:
                    field_name = field.lower().replace(' ', '_')
                # Dicts are indexed by the display name; objects by the
                # derived attribute name.
                if type(o) == dict and field in o:
                    data = o[field]
                else:
                    if not hasattr(o, field_name) and exclude_unavailable:
                        # NOTE(review): a field removed here stays in rows
                        # already built for earlier objects, which can leave
                        # mismatched row lengths -- confirm callers pass
                        # uniformly-shaped objects.
                        removed_fields.append(field)
                        continue
                    else:
                        data = getattr(o, field_name, '')
                if data is None:
                    data = '-'
                # Carriage returns break prettytable layout; flatten them.
                if isinstance(data, six.string_types) and "\r" in data:
                    data = data.replace("\r", " ")
                row.append(data)
        rows.append(row)

    # Drop unavailable fields from the caller's list (mutated in place).
    for f in removed_fields:
        fields.remove(f)

    pt = prettytable.PrettyTable((f for f in fields), caching=False)
    pt.align = 'l'
    for row in rows:
        pt.add_row(row)

    if sortby_index is None:
        order_by = None
    else:
        order_by = fields[sortby_index]
    _print(pt, order_by)
|
||||
|
||||
|
||||
def print_dict(d, property="Property", dict_format_list=None,
               json_format_list=None):
    """Print a dict as a two-column (property, value) prettytable.

    Keys listed in ``dict_format_list`` / ``json_format_list`` have their
    values pretty-printed as JSON before display.
    """
    table = prettytable.PrettyTable([property, 'Value'], caching=False)
    table.align = 'l'
    for key, value in d.items():
        # Carriage returns break prettytable layout; flatten them.
        if isinstance(value, six.string_types) and "\r" in value:
            value = value.replace("\r", " ")
        if dict_format_list is not None and key in dict_format_list:
            value = dict_prettyprint(value)
        if json_format_list is not None and key in json_format_list:
            value = json_prettyprint(value)
        table.add_row([key, value])
    _print(table, property)
|
||||
|
||||
|
||||
def dict_prettyprint(val):
    """dict pretty print formatter.

    :param val: dict.
    :return: formatted json string.
    """
    # Stable key order makes the output diff-friendly.
    formatted = jsonutils.dumps(val, indent=2, sort_keys=True)
    return formatted
|
||||
|
||||
|
||||
def json_prettyprint(val):
    """json pretty print formatter.

    :param val: json string.
    :return: formatted json string.
    """
    if not val:
        # Preserve falsy inputs (None, '') unchanged.
        return val
    return jsonutils.dumps(jsonutils.loads(val), indent=2, sort_keys=True)
|
||||
|
||||
|
||||
def find_resource(manager, name_or_id, *args, **kwargs):
    """Helper for the _find_* methods."""
    # 1) Treat purely numeric identifiers as integer IDs.
    try:
        if isinstance(name_or_id, int) or name_or_id.isdigit():
            return manager.get(int(name_or_id), *args, **kwargs)
    except exceptions.NotFound:
        pass

    # 2) Anything that parses as a UUID is looked up directly by ID.
    try:
        uuid.UUID(str(name_or_id))
        return manager.get(name_or_id, *args, **kwargs)
    except (ValueError, exceptions.NotFound):
        pass

    # 3) Fall back to a lookup by name.
    try:
        return manager.find(name=name_or_id)
    except exceptions.NotFound:
        resource_kind = manager.resource_class.__name__.lower()
        msg = ("No %s with a name or ID of '%s' exists."
               % (resource_kind, name_or_id))
        raise exceptions.CommandError(msg)
|
@ -1,28 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""oslo.i18n integration module.
|
||||
|
||||
See https://docs.openstack.org/oslo.i18n/latest/user/usage.html
|
||||
|
||||
"""
|
||||
|
||||
import oslo_i18n
|
||||
|
||||
_translators = oslo_i18n.TranslatorFactory(domain='karborclient')
|
||||
|
||||
# The primary translation function using the well-known name "_"
|
||||
_ = _translators.primary
|
||||
|
||||
|
||||
def get_available_languages():
    """Return the languages karborclient has translations for."""
    return oslo_i18n.get_available_languages('karborclient')
|
@ -1,56 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import logging
|
||||
|
||||
from osc_lib import utils
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_DATA_PROTECTION_API_VERSION = '1'
|
||||
API_VERSION_OPTION = 'os_data_protection_api_version'
|
||||
API_NAME = 'data_protection'
|
||||
API_VERSIONS = {
|
||||
'1': 'karborclient.v1.client.Client',
|
||||
}
|
||||
|
||||
|
||||
def make_client(instance):
    """Returns a data protection service client"""
    requested_version = instance._api_version[API_NAME]
    # Resolve the versioned client class from the API_VERSIONS map.
    client_class = utils.get_client_class(
        API_NAME, requested_version, API_VERSIONS)
    LOG.debug('Instantiating data protection client: %s', client_class)
    return client_class(
        auth=instance.auth,
        session=instance.session,
        service_type="data-protect"
    )
|
||||
|
||||
|
||||
def build_option_parser(parser):
    """Hook to add global options"""
    default_version = utils.env(
        'OS_DATA_PROTECTION_API_VERSION',
        default=DEFAULT_DATA_PROTECTION_API_VERSION)
    parser.add_argument(
        '--os-data-protection-api-version',
        metavar='<data-protection-api-version>',
        default=default_version,
        help=('Data protection API version, default=%s'
              ' (Env: OS_DATA_PROTECTION_API_VERSION)'
              % DEFAULT_DATA_PROTECTION_API_VERSION))
    return parser
|
@ -1,283 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 checkpoint action implementations"""
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
from karborclient.i18n import _
|
||||
from karborclient import utils
|
||||
|
||||
|
||||
def format_checkpoint(checkpoint_info):
    """Rewrite checkpoint fields in place so they display nicely.

    Flattens the protection plan to a two-line string, pretty-prints the
    resource graph JSON, and drops the 'links' entry.
    """
    if 'protection_plan' in checkpoint_info:
        plan = checkpoint_info['protection_plan']
        checkpoint_info['protection_plan'] = (
            "Name: %s\nId: %s" % (plan['name'], plan['id']))
    if 'resource_graph' in checkpoint_info:
        graph = jsonutils.loads(checkpoint_info['resource_graph'])
        checkpoint_info['resource_graph'] = jsonutils.dumps(
            graph, indent=2, sort_keys=True)
    checkpoint_info.pop("links", None)
|
||||
|
||||
|
||||
class ListCheckpoints(command.Lister):
    """osc command: list checkpoints for a protection provider."""

    _description = _("List checkpoints.")

    log = logging.getLogger(__name__ + ".ListCheckpoints")

    def get_parser(self, prog_name):
        """Declare the command's positional and filter arguments."""
        parser = super(ListCheckpoints, self).get_parser(prog_name)
        parser.add_argument(
            'provider_id',
            metavar='<provider_id>',
            help=_('ID of provider.'),
        )
        parser.add_argument(
            '--all-projects',
            action='store_true',
            default=False,
            help=_('Include all projects (admin only)'),
        )
        parser.add_argument(
            '--plan_id',
            metavar='<plan_id>',
            default=None,
            help=_('Filters results by a plan ID. Default=None.'),
        )
        parser.add_argument(
            '--start_date',
            type=str,
            metavar='<start_date>',
            default=None,
            help=_('Filters results by a start date("Y-m-d"). Default=None.'),
        )
        parser.add_argument(
            '--end_date',
            type=str,
            metavar='<end_date>',
            default=None,
            help=_('Filters results by a end date("Y-m-d"). Default=None.'),
        )
        parser.add_argument(
            '--project_id',
            metavar='<project_id>',
            default=None,
            help=_('Filters results by a project ID. Default=None.'),
        )
        parser.add_argument(
            '--marker',
            metavar='<checkpoint>',
            help=_('The last checkpoint ID of the previous page.'),
        )
        parser.add_argument(
            '--limit',
            type=int,
            metavar='<num-checkpoints>',
            help=_('Maximum number of checkpoints to display.'),
        )
        parser.add_argument(
            '--sort',
            metavar="<key>[:<direction>]",
            default=None,
            help=_("Sort output by selected keys and directions(asc or desc), "
                   "multiple keys and directions can be "
                   "specified separated by comma"),
        )
        return parser

    def take_action(self, parsed_args):
        """Query checkpoints and return (headers, rows) for the Lister."""
        self.log.debug("take_action(%s)", parsed_args)
        data_protection_client = self.app.client_manager.data_protection
        # Filtering by an explicit project implies cross-project scope.
        all_projects = bool(parsed_args.project_id) or parsed_args.all_projects
        search_opts = {
            'plan_id': parsed_args.plan_id,
            'start_date': parsed_args.start_date,
            'end_date': parsed_args.end_date,
            'project_id': parsed_args.project_id,
            'all_tenants': all_projects
        }

        data = data_protection_client.checkpoints.list(
            provider_id=parsed_args.provider_id, search_opts=search_opts,
            marker=parsed_args.marker, limit=parsed_args.limit,
            sort=parsed_args.sort)

        column_headers = ['Id', 'Project id', 'Status', 'Protection plan',
                          'Metadata', 'Created at']

        # Render the nested plan dict as a compact two-line cell.
        def plan_formatter(plan):
            return "Name: %s\nId: %s" % (plan['name'],
                                         plan['id'])
        formatters = {"Protection plan": plan_formatter}
        return (column_headers,
                (osc_utils.get_item_properties(
                    s, column_headers, formatters=formatters
                ) for s in data))
|
||||
|
||||
|
||||
class ShowCheckpoint(command.ShowOne):
    """osc command: show one checkpoint's details."""

    _description = "Shows checkpoint details"

    def get_parser(self, prog_name):
        parser = super(ShowCheckpoint, self).get_parser(prog_name)
        parser.add_argument(
            'provider_id',
            metavar="<provider_id>",
            help=_('Id of provider.')
        )
        parser.add_argument(
            'checkpoint_id',
            metavar="<checkpoint_id>",
            help=_('Id of checkpoint.')
        )
        return parser

    def take_action(self, parsed_args):
        """Fetch the checkpoint and return (columns, values) for ShowOne."""
        client = self.app.client_manager.data_protection
        checkpoint = client.checkpoints.get(parsed_args.provider_id,
                                            parsed_args.checkpoint_id)
        # Mutates checkpoint._info in place for display.
        format_checkpoint(checkpoint._info)
        return zip(*sorted(checkpoint._info.items()))
|
||||
|
||||
|
||||
class CreateCheckpoint(command.ShowOne):
    """osc command: create a checkpoint from a protection plan."""

    _description = "Creates a checkpoint"

    def get_parser(self, prog_name):
        parser = super(CreateCheckpoint, self).get_parser(prog_name)
        parser.add_argument(
            'provider_id',
            metavar='<provider_id>',
            help=_('ID of provider.')
        )
        parser.add_argument(
            'plan_id',
            metavar='<plan_id>',
            help=_('ID of plan.')
        )
        parser.add_argument(
            '--extra_info',
            type=str,
            nargs='*',
            metavar='<key=value>',
            default=None,
            help=_('The extra info of a checkpoint.')
        )
        return parser

    def take_action(self, parsed_args):
        """Create the checkpoint and return (columns, values) for ShowOne."""
        client = self.app.client_manager.data_protection
        checkpoint_extra_info = None
        if parsed_args.extra_info is not None:
            # Parse the key=value pairs into a dict.
            checkpoint_extra_info = utils.extract_extra_info(parsed_args)
        checkpoint = client.checkpoints.create(parsed_args.provider_id,
                                               parsed_args.plan_id,
                                               checkpoint_extra_info)
        # Mutates checkpoint._info in place for display.
        format_checkpoint(checkpoint._info)
        return zip(*sorted(checkpoint._info.items()))
|
||||
|
||||
|
||||
class DeleteCheckpoint(command.Command):
    """osc command: delete one or more checkpoints."""

    _description = "Delete checkpoint"

    log = logging.getLogger(__name__ + ".DeleteCheckpoint")

    def get_parser(self, prog_name):
        parser = super(DeleteCheckpoint, self).get_parser(prog_name)
        parser.add_argument(
            'provider_id',
            metavar='<provider_id>',
            help=_('Id of provider.')
        )
        parser.add_argument(
            'checkpoint',
            metavar='<checkpoint>',
            nargs="+",
            help=_('Id of checkpoint.')
        )
        return parser

    def take_action(self, parsed_args):
        """Delete each checkpoint; fail only if every deletion failed."""
        client = self.app.client_manager.data_protection
        failure_count = 0
        for checkpoint_id in parsed_args.checkpoint:
            try:
                client.checkpoints.delete(parsed_args.provider_id,
                                          checkpoint_id)
            except exceptions.NotFound:
                # Best-effort: log and keep deleting the remaining IDs.
                failure_count += 1
                self.log.error(
                    "Failed to delete '{0}'; checkpoint not found".
                    format(checkpoint_id))
        if failure_count == len(parsed_args.checkpoint):
            raise exceptions.CommandError(
                "Unable to find and delete any of the "
                "specified checkpoint.")
|
||||
|
||||
|
||||
class ResetCheckpointState(command.Command):
    """osc command: reset checkpoint state to 'error' (or 'available')."""

    _description = "Reset checkpoint state"

    log = logging.getLogger(__name__ + ".ResetCheckpointState")

    def get_parser(self, prog_name):
        parser = super(ResetCheckpointState, self).get_parser(prog_name)
        parser.add_argument(
            'provider_id',
            metavar='<provider_id>',
            help=_('Id of provider.')
        )
        parser.add_argument(
            'checkpoint',
            metavar='<checkpoint>',
            nargs="+",
            help=_('Id of checkpoint.')
        )
        parser.add_argument(
            '--available',
            action='store_const', dest='state',
            default='error', const='available',
            help=_('Request the checkpoint be reset to "available" state '
                   'instead of "error" state(the default).'),
        )
        return parser

    def take_action(self, parsed_args):
        """Reset each checkpoint's state; fail only if every reset failed."""
        client = self.app.client_manager.data_protection
        failure_count = 0
        for checkpoint_id in parsed_args.checkpoint:
            try:
                client.checkpoints.reset_state(
                    parsed_args.provider_id, checkpoint_id, parsed_args.state)
            except exceptions.NotFound:
                failure_count += 1
                self.log.error(
                    "Failed to reset state of '{0}'; checkpoint "
                    "not found".format(checkpoint_id))
            except exceptions.Forbidden:
                failure_count += 1
                self.log.error(
                    "Failed to reset state of '{0}'; not "
                    "allowed".format(checkpoint_id))
            except exceptions.BadRequest:
                failure_count += 1
                self.log.error(
                    "Failed to reset state of '{0}'; invalid input or "
                    "current checkpoint state".format(checkpoint_id))
        if failure_count == len(parsed_args.checkpoint):
            raise exceptions.CommandError(
                "Unable to find or reset any of the specified "
                "checkpoint's state.")
|
@ -1,111 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 operation_log action implementations"""
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
|
||||
from karborclient.i18n import _
|
||||
|
||||
|
||||
class ListOperationLogs(command.Lister):
    """List operation logs, optionally filtered, paginated and sorted."""

    _description = _("List operation_logs.")

    log = logging.getLogger(__name__ + ".ListOperationLogs")

    def get_parser(self, prog_name):
        p = super(ListOperationLogs, self).get_parser(prog_name)
        p.add_argument(
            '--all-projects',
            action='store_true',
            default=False,
            help=_('Include all projects (admin only)'),
        )
        p.add_argument(
            '--status',
            metavar='<status>',
            help=_('Filter results by status'),
        )
        p.add_argument(
            '--marker',
            metavar='<operation_log>',
            help=_('The last operation_log ID of the previous page'),
        )
        p.add_argument(
            '--limit',
            type=int,
            metavar='<num-operation_logs>',
            help=_('Maximum number of operation_logs to display'),
        )
        p.add_argument(
            '--sort',
            metavar="<key>[:<direction>]",
            default=None,
            help=_("Sort output by selected keys and directions(asc or desc), "
                   "multiple keys and directions can be "
                   "specified separated by comma"),
        )
        p.add_argument(
            '--project',
            metavar='<project>',
            help=_('Filter results by a project(admin only)')
        )
        return p

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_protection
        # A project filter implies an all-projects query.
        all_projects = bool(parsed_args.project) or parsed_args.all_projects

        logs = client.operation_logs.list(
            search_opts={
                'all_tenants': all_projects,
                'project_id': parsed_args.project,
                'status': parsed_args.status,
            },
            marker=parsed_args.marker,
            limit=parsed_args.limit,
            sort=parsed_args.sort)

        headers = ['Id', 'Operation Type', 'Checkpoint id',
                   'Plan Id', 'Provider id', 'Restore Id',
                   'Scheduled Operation Id', 'Status',
                   'Started At', 'Ended At', 'Error Info',
                   'Extra Info']

        rows = (osc_utils.get_item_properties(item, headers)
                for item in logs)
        return (headers, rows)
|
||||
|
||||
|
||||
class ShowOperationLog(command.ShowOne):
    """Show the details of a single operation log."""

    _description = "Shows operation_log details"

    def get_parser(self, prog_name):
        p = super(ShowOperationLog, self).get_parser(prog_name)
        p.add_argument(
            'operation_log',
            metavar="<operation_log>",
            help=_('The UUID of the operation_log.')
        )
        return p

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection
        record = osc_utils.find_resource(
            client.operation_logs, parsed_args.operation_log)

        info = record._info
        info.pop("links", None)  # links are noise in tabular output
        return zip(*sorted(info.items()))
|
@ -1,282 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 plan action implementations"""
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
from karborclient.i18n import _
|
||||
from karborclient import utils
|
||||
|
||||
|
||||
def format_plan(plan_info):
    """Pretty-print nested plan fields in place for display.

    'resources' and 'parameters' are re-serialized as indented JSON and the
    'links' entry is dropped.
    """
    for field in ('resources', 'parameters'):
        if field in plan_info:
            plan_info[field] = jsonutils.dumps(
                plan_info[field], indent=2, sort_keys=True)
    plan_info.pop("links", None)
|
||||
|
||||
|
||||
class ListPlans(command.Lister):
    """List protection plans, optionally filtered, paginated and sorted."""

    _description = _("List plans.")

    log = logging.getLogger(__name__ + ".ListPlans")

    def get_parser(self, prog_name):
        p = super(ListPlans, self).get_parser(prog_name)
        p.add_argument(
            '--all-projects',
            action='store_true',
            default=False,
            help=_('Include all projects (admin only)'),
        )
        p.add_argument(
            '--name',
            metavar='<name>',
            help=_('Filter results by plan name'),
        )
        p.add_argument(
            '--description',
            metavar='<description>',
            help=_('Filter results by plan description'),
        )
        p.add_argument(
            '--status',
            metavar='<status>',
            help=_('Filter results by status'),
        )
        p.add_argument(
            '--marker',
            metavar='<plan>',
            help=_('The last plan ID of the previous page'),
        )
        p.add_argument(
            '--limit',
            type=int,
            metavar='<num-plans>',
            help=_('Maximum number of plans to display'),
        )
        p.add_argument(
            '--sort',
            metavar="<key>[:<direction>]",
            default=None,
            help=_("Sort output by selected keys and directions(asc or desc) "
                   "(default: name:asc), multiple keys and directions can be "
                   "specified separated by comma"),
        )
        p.add_argument(
            '--project',
            metavar='<project>',
            help=_('Filter results by a project(admin only)')
        )
        return p

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_protection
        # A project filter implies an all-projects query.
        all_projects = bool(parsed_args.project) or parsed_args.all_projects

        plans = client.plans.list(
            search_opts={
                'all_tenants': all_projects,
                'project_id': parsed_args.project,
                'name': parsed_args.name,
                'description': parsed_args.description,
                'status': parsed_args.status,
            },
            marker=parsed_args.marker,
            limit=parsed_args.limit,
            sort=parsed_args.sort)

        headers = ['Id', 'Name', 'Description', 'Provider id', 'Status']

        rows = (osc_utils.get_item_properties(item, headers)
                for item in plans)
        return (headers, rows)
|
||||
|
||||
|
||||
class ShowPlan(command.ShowOne):
    """Show the details of a single protection plan."""

    _description = "Shows plan details"

    def get_parser(self, prog_name):
        p = super(ShowPlan, self).get_parser(prog_name)
        p.add_argument(
            'plan',
            metavar="<plan>",
            help=_('The UUID of the plan.')
        )
        return p

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection
        plan = osc_utils.find_resource(client.plans, parsed_args.plan)

        info = plan._info
        format_plan(info)  # JSON-prettify nested fields, drop links
        return zip(*sorted(info.items()))
|
||||
|
||||
|
||||
class CreatePlan(command.ShowOne):
    """Create a protection plan from a provider and a resource list."""

    _description = "Creates a plan"

    def get_parser(self, prog_name):
        p = super(CreatePlan, self).get_parser(prog_name)
        p.add_argument(
            'name',
            metavar='<name>',
            help=_('The name of the plan.')
        )
        p.add_argument(
            'provider_id',
            metavar='<provider_id>',
            help=_('The UUID of the provider.')
        )
        p.add_argument(
            'resources',
            metavar='<id=type=name=extra_info,id=type=name=extra_info>',
            help=_('Resource in list must be a dict when creating'
                   ' a plan. The keys of resource are id ,type, name and '
                   'extra_info. The extra_info field is optional.')
        )
        p.add_argument(
            '--parameters-json',
            type=str,
            dest='parameters_json',
            metavar='<parameters>',
            default=None,
            help=_('Plan parameters in json format.')
        )
        p.add_argument(
            '--parameters',
            action='append',
            metavar='resource_type=<type>[,resource_id=<id>,key=val,...]',
            default=[],
            help=_('Plan parameters, may be specified multiple times. '
                   'resource_type: type of resource to apply parameters. '
                   'resource_id: limit the parameters to a specific resource. '
                   'Other keys and values: according to provider\'s protect '
                   'schema.')
        )
        p.add_argument(
            '--description',
            metavar='<description>',
            help=_('The description of the plan.')
        )
        return p

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection
        # Validate the provider id client-side before any API round trips.
        if not uuidutils.is_uuid_like(parsed_args.provider_id):
            raise exceptions.CommandError(
                "Invalid provider id provided.")

        resources = utils.extract_resources(parsed_args)
        utils.check_resources(client, resources)
        parameters = utils.extract_parameters(parsed_args)

        plan = client.plans.create(
            parsed_args.name, parsed_args.provider_id, resources, parameters,
            description=parsed_args.description)

        info = plan._info
        format_plan(info)
        return zip(*sorted(info.items()))
|
||||
|
||||
|
||||
class UpdatePlan(command.ShowOne):
    """Update the name, description, resources or status of a plan."""

    _description = "Update a plan"

    def get_parser(self, prog_name):
        p = super(UpdatePlan, self).get_parser(prog_name)
        p.add_argument(
            "plan_id",
            metavar="<PLAN ID>",
            help=_("Id of plan to update.")
        )
        p.add_argument(
            "--name",
            metavar="<name>",
            help=_("A name to which the plan will be renamed.")
        )
        p.add_argument(
            "--description",
            metavar="<description>",
            help=_("Description to which the plan will be updated.")
        )
        p.add_argument(
            "--resources",
            metavar="<id=type=name,id=type=name>",
            help=_("Resources to which the plan will be updated.")
        )
        p.add_argument(
            "--status",
            metavar="<suspended|started>",
            help=_("status to which the plan will be updated.")
        )
        return p

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection

        # Only the explicitly supplied fields are sent in the update body.
        updates = {}
        if parsed_args.name is not None:
            updates['name'] = parsed_args.name
        if parsed_args.description is not None:
            updates['description'] = parsed_args.description
        if parsed_args.resources is not None:
            updates['resources'] = utils.extract_resources(parsed_args)
        if parsed_args.status is not None:
            updates['status'] = parsed_args.status

        try:
            plan = osc_utils.find_resource(client.plans, parsed_args.plan_id)
            plan = client.plans.update(plan.id, updates)
        except exceptions.NotFound:
            raise exceptions.CommandError(
                "Plan %s not found" % parsed_args.plan_id)
        else:
            info = plan._info
            format_plan(info)
            return zip(*sorted(info.items()))
|
||||
|
||||
|
||||
class DeletePlan(command.Command):
    """Delete one or more protection plans.

    Deletion is best-effort: a missing plan is logged and the remaining
    plans are still attempted; the command only fails when every requested
    plan was not found.
    """

    _description = "Delete plan"

    # Consistency fix: sibling delete commands (e.g. checkpoint delete)
    # report per-item failures through a class logger; this command used a
    # bare print(), bypassing the configured logging setup.
    log = logging.getLogger(__name__ + ".DeletePlan")

    def get_parser(self, prog_name):
        parser = super(DeletePlan, self).get_parser(prog_name)
        parser.add_argument(
            'plan',
            metavar='<plan>',
            nargs="+",
            help=_('ID of plan.')
        )
        return parser

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection
        failure_count = 0
        for plan_id in parsed_args.plan:
            try:
                plan = osc_utils.find_resource(client.plans, plan_id)
                client.plans.delete(plan.id)
            except exceptions.NotFound:
                failure_count += 1
                self.log.error("Failed to delete '{0}'; plan not "
                               "found".format(plan_id))
        if failure_count == len(parsed_args.plan):
            raise exceptions.CommandError(
                "Unable to find and delete any of the "
                "specified plan.")
|
@ -1,198 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 protectables action implementations"""
|
||||
|
||||
import functools
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from karborclient.i18n import _
|
||||
from karborclient import utils
|
||||
|
||||
|
||||
class ListProtectables(command.Lister):
    """List the protectable types known to the service."""

    _description = _("List protectable types.")

    log = logging.getLogger(__name__ + ".ListProtectables")

    def get_parser(self, prog_name):
        # No command-specific options; only the base parser is needed.
        return super(ListProtectables, self).get_parser(prog_name)

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_protection

        types = client.protectables.list()
        headers = ['Protectable type']

        rows = (osc_utils.get_item_properties(item, headers)
                for item in types)
        return (headers, rows)
|
||||
|
||||
|
||||
class ShowProtectable(command.ShowOne):
    """Show the details of one protectable type."""

    _description = "Shows protectable type details"

    def get_parser(self, prog_name):
        p = super(ShowProtectable, self).get_parser(prog_name)
        p.add_argument(
            'protectable_type',
            metavar="<protectable_type>",
            help=_('Protectable type.')
        )
        return p

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection
        record = osc_utils.find_resource(client.protectables,
                                         parsed_args.protectable_type)

        info = record._info
        info.pop("links", None)
        # Render the dependent-type list one entry per line for readability.
        if 'dependent_types' in info:
            info['dependent_types'] = "\n".join(info['dependent_types'])
        return zip(*sorted(info.items()))
|
||||
|
||||
|
||||
class ListProtectableInstances(command.Lister):
    """List protectable instances of a given protectable type.

    Supports pagination (--marker/--limit), sorting and parameter-based
    filtering of the instance listing.
    """

    _description = _("List protectable instances.")

    log = logging.getLogger(__name__ + ".ListProtectableInstances")

    def get_parser(self, prog_name):
        parser = super(ListProtectableInstances, self).get_parser(prog_name)
        parser.add_argument(
            'protectable_type',
            metavar="<protectable_type>",
            help=_('Type of protectable.')
        )
        parser.add_argument(
            '--type',
            metavar="<type>",
            default=None,
            help=_('Filters results by protectable type. Default=None.')
        )
        parser.add_argument(
            '--marker',
            metavar="<protectable_instance>",
            default=None,
            help=_('The last protectable instance ID of the previous page.')
        )
        parser.add_argument(
            '--limit',
            # Consistency fix: every other list command in this plugin
            # validates --limit as an integer; this one passed the raw
            # string through to the API.
            type=int,
            metavar="<num-protectable_instances>",
            default=None,
            help=_('Maximum number of protectable instances to display.')
        )
        parser.add_argument(
            '--sort',
            metavar="<key>[:<direction>]",
            default=None,
            help=_("Sort output by selected keys and directions(asc or desc), "
                   "multiple keys and directions can be "
                   "specified separated by comma"),
        )
        parser.add_argument(
            '--parameters',
            type=str,
            nargs='*',
            metavar="<key=value>",
            default=None,
            help=_('List instances by parameters key and value pair. '
                   'Default=None.')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        data_protection_client = self.app.client_manager.data_protection

        search_opts = {
            'type': parsed_args.type,
            'parameters': (utils.extract_instances_parameters(parsed_args)
                           if parsed_args.parameters else None),
        }

        data = data_protection_client.protectables.list_instances(
            parsed_args.protectable_type, search_opts=search_opts,
            marker=parsed_args.marker, limit=parsed_args.limit,
            sort=parsed_args.sort)

        column_headers = ['Id', 'Type', 'Name', 'Dependent resources',
                          'Extra info']

        # Nested dict columns are rendered as indented JSON.
        json_dumps = functools.partial(jsonutils.dumps,
                                       indent=2, sort_keys=True)
        formatters = {
            "Extra info": json_dumps,
            "Dependent resources": json_dumps,
        }
        return (column_headers,
                (osc_utils.get_item_properties(
                    s, column_headers, formatters=formatters,
                ) for s in data))
|
||||
|
||||
|
||||
class ShowProtectableInstance(command.ShowOne):
    """Show the details of one protectable instance.

    Looks up the instance by protectable type and id; optional key=value
    parameters are forwarded as search options.
    """

    _description = "Shows protectable instance details"

    def get_parser(self, prog_name):
        parser = super(ShowProtectableInstance, self).get_parser(prog_name)
        parser.add_argument(
            'protectable_type',
            metavar="<protectable_type>",
            help=_('Protectable type.')
        )
        parser.add_argument(
            'protectable_id',
            metavar="<protectable_id>",
            help=_('Protectable instance id.')
        )
        parser.add_argument(
            '--parameters',
            type=str,
            nargs='*',
            metavar="<key=value>",
            default=None,
            # Typo fix in user-facing help: "a instance" -> "an instance".
            help=_('Show an instance by parameters key and value pair. '
                   'Default=None.')
        )
        return parser

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection

        search_opts = {
            'parameters': (utils.extract_instances_parameters(parsed_args)
                           if parsed_args.parameters else None),
        }

        instance = client.protectables.get_instance(
            parsed_args.protectable_type,
            parsed_args.protectable_id,
            search_opts=search_opts)

        # Nested fields are rendered as indented JSON; links are dropped.
        json_dumps = functools.partial(jsonutils.dumps,
                                       indent=2, sort_keys=True)
        instance._info.pop("links", None)
        for key in ('extra_info', 'dependent_resources'):
            if key not in instance._info:
                continue
            instance._info[key] = json_dumps(instance._info[key])

        return zip(*sorted(instance._info.items()))
|
@ -1,105 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 provider action implementations"""
|
||||
|
||||
import functools
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from karborclient.i18n import _
|
||||
|
||||
|
||||
class ListProviders(command.Lister):
    """List protection providers with optional filtering and paging."""

    _description = _("List providers.")

    log = logging.getLogger(__name__ + ".ListProviders")

    def get_parser(self, prog_name):
        p = super(ListProviders, self).get_parser(prog_name)
        p.add_argument(
            '--name',
            metavar='<name>',
            help=_('Filters results by a name. Default=None.'),
        )
        p.add_argument(
            '--description',
            metavar='<description>',
            help=_('Filters results by a description. Default=None.'),
        )
        p.add_argument(
            '--marker',
            metavar='<provider>',
            help=_('The last provider ID of the previous page'),
        )
        p.add_argument(
            '--limit',
            type=int,
            metavar='<num-providers>',
            help=_('Maximum number of providers to display'),
        )
        p.add_argument(
            '--sort',
            metavar="<key>[:<direction>]",
            default=None,
            help=_("Sort output by selected keys and directions(asc or desc) "
                   "(default: name:asc), multiple keys and directions can be "
                   "specified separated by comma"),
        )
        return p

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_protection

        providers = client.providers.list(
            search_opts={
                'name': parsed_args.name,
                'description': parsed_args.description,
            },
            marker=parsed_args.marker,
            limit=parsed_args.limit,
            sort=parsed_args.sort)

        headers = ['Id', 'Name', 'Description']

        rows = (osc_utils.get_item_properties(item, headers)
                for item in providers)
        return (headers, rows)
|
||||
|
||||
|
||||
class ShowProvider(command.ShowOne):
    """Show the details of one protection provider."""

    _description = "Shows provider details"

    def get_parser(self, prog_name):
        p = super(ShowProvider, self).get_parser(prog_name)
        p.add_argument(
            'provider',
            metavar="<provider>",
            help=_('The UUID of the provider.')
        )
        return p

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection
        provider = osc_utils.find_resource(client.providers,
                                           parsed_args.provider)

        info = provider._info
        info.pop("links", None)
        # Render the schema blob as indented JSON for readability.
        if 'extended_info_schema' in info:
            info['extended_info_schema'] = jsonutils.dumps(
                info['extended_info_schema'], indent=2, sort_keys=True)
        return zip(*sorted(info.items()))
|
@ -1,75 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 quota classes action implementations"""
|
||||
|
||||
from osc_lib.command import command
|
||||
|
||||
|
||||
def quota_class_set_pretty_show(quota_classes):
    """Flatten a quota-class object into sorted (key, value) display pairs.

    Nested dict values are rendered as 'k = v' lines joined by newlines.
    """
    rows = []
    for name, value in sorted(quota_classes.to_dict().items()):
        if isinstance(value, dict):
            value = '\n'.join('%s = %s' % pair
                              for pair in sorted(value.items()))
        rows.append((name, value))

    return rows
|
||||
|
||||
|
||||
class ShowQuotaClasses(command.ShowOne):
    """Show the quotas defined for a named quota class."""

    _description = "Shows Quota classes."

    def get_parser(self, prog_name):
        p = super(ShowQuotaClasses, self).get_parser(prog_name)
        p.add_argument(
            'class_name',
            metavar='<class_name>',
            help='Name of quota class to list the quotas for.')
        return p

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection
        fetched = client.quota_classes.get(parsed_args.class_name)
        return zip(*sorted(quota_class_set_pretty_show(fetched)))
|
||||
|
||||
|
||||
class UpdateQuotaClasses(command.ShowOne):
    """Update the quotas of a quota class (admin only)."""

    _description = "Update the quotas for a quota class (Admin only)."

    def get_parser(self, prog_name):
        p = super(UpdateQuotaClasses, self).get_parser(prog_name)
        p.add_argument(
            'class_name',
            metavar='<class_name>',
            help='Name of quota class to set the quotas for.')
        p.add_argument(
            '--plans',
            metavar='<plans>',
            type=int,
            default=None,
            help='New value for the "plans" quota.')
        return p

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection
        updated = client.quota_classes.update(
            parsed_args.class_name,
            {"plans": parsed_args.plans})
        return zip(*sorted(quota_class_set_pretty_show(updated)))
|
@ -1,108 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 quotas action implementations"""
|
||||
|
||||
from osc_lib.command import command
|
||||
|
||||
|
||||
def quota_set_pretty_show(quotas):
    """Flatten a quotas object into sorted (key, value) display pairs.

    Nested dict values are rendered as 'k = v' lines joined by newlines.
    """
    rows = []
    for name, value in sorted(quotas.to_dict().items()):
        if isinstance(value, dict):
            value = '\n'.join('%s = %s' % pair
                              for pair in sorted(value.items()))
        rows.append((name, value))

    return rows
|
||||
|
||||
|
||||
class ShowQuotas(command.ShowOne):
    """Show quotas for a tenant (defaults to the caller's project)."""

    _description = "Shows Quotas"

    def get_parser(self, prog_name):
        p = super(ShowQuotas, self).get_parser(prog_name)
        p.add_argument(
            '--tenant',
            metavar='<tenant>',
            default=None,
            help='ID of tenant to list the quotas for.')
        p.add_argument(
            '--detail',
            action='store_true',
            help='Optional flag to indicate whether to show quota in detail. '
                 'Default false.')
        return p

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection
        # Fall back to the authenticated project when --tenant is omitted.
        project_id = parsed_args.tenant or client.http_client.get_project_id()
        fetched = client.quotas.get(project_id=project_id,
                                    detail=parsed_args.detail)
        return zip(*sorted(quota_set_pretty_show(fetched)))
|
||||
|
||||
|
||||
class ShowDefaultQuotas(command.ShowOne):
    """Show the default quotas for a tenant."""

    _description = "Shows default Quotas"

    def get_parser(self, prog_name):
        p = super(ShowDefaultQuotas, self).get_parser(prog_name)
        p.add_argument(
            '--tenant',
            metavar='<tenant>',
            default=None,
            help='ID of tenant to list the quotas for.')
        return p

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection
        # Fall back to the authenticated project when --tenant is omitted.
        project_id = parsed_args.tenant or client.http_client.get_project_id()
        defaults = client.quotas.defaults(project_id)
        return zip(*sorted(quota_set_pretty_show(defaults)))
|
||||
|
||||
|
||||
class UpdateQuotas(command.ShowOne):
    """Update quota values for a tenant."""

    _description = "Updates Quotas"

    def get_parser(self, prog_name):
        p = super(UpdateQuotas, self).get_parser(prog_name)
        p.add_argument(
            'tenant',
            metavar='<tenant>',
            help='ID of tenant to set the quotas for.')
        p.add_argument(
            '--plans',
            metavar='<plans>',
            type=int,
            default=None,
            help='New value for the "plans" quota.')
        return p

    def take_action(self, parsed_args):
        client = self.app.client_manager.data_protection
        updated = client.quotas.update(parsed_args.tenant,
                                       {"plans": parsed_args.plans})
        return zip(*sorted(quota_set_pretty_show(updated)))
|
@ -1,214 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 restore action implementations"""
|
||||
|
||||
import functools
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
from karborclient.i18n import _
|
||||
from karborclient import utils
|
||||
|
||||
|
||||
def format_restore(restore_info):
    """Prepare a restore's info dict for display, in place.

    JSON-encodes the nested dict fields so they render as readable text
    and drops the API 'links' entry, which is noise in CLI output.
    """
    json_keys = ('parameters', 'resources_status', 'resources_reason')
    for key in json_keys:
        if key in restore_info:
            restore_info[key] = jsonutils.dumps(
                restore_info[key], indent=2, sort_keys=True)
    restore_info.pop("links", None)
|
||||
|
||||
|
||||
class ListRestores(command.Lister):
    """Tabular listing of restore operations."""

    _description = _("List restores.")

    log = logging.getLogger(__name__ + ".ListRestores")

    def get_parser(self, prog_name):
        """Build the argument parser for the restore-list command."""
        parser = super(ListRestores, self).get_parser(prog_name)
        parser.add_argument(
            '--all-projects', action='store_true', default=False,
            help=_('Include all projects (admin only)'))
        parser.add_argument(
            '--status', metavar='<status>',
            help=_('Filter results by status'))
        parser.add_argument(
            '--marker', metavar='<restore>',
            help=_('The last restore ID of the previous page'))
        parser.add_argument(
            '--limit', type=int, metavar='<num-restores>',
            help=_('Maximum number of restores to display'))
        parser.add_argument(
            '--sort', metavar="<key>[:<direction>]", default=None,
            help=_("Sort output by selected keys and directions(asc or desc), "
                   "multiple keys and directions can be "
                   "specified separated by comma"))
        parser.add_argument(
            '--project', metavar='<project>',
            help=_('Filter results by a project(admin only)'))
        return parser

    def take_action(self, parsed_args):
        """Query the restores matching the filters and lay out the table."""
        self.log.debug("take_action(%s)", parsed_args)
        karbor = self.app.client_manager.data_protection
        # Filtering on a specific project implies an all-projects query.
        list_all = parsed_args.all_projects or bool(parsed_args.project)

        filters = {
            'all_tenants': list_all,
            'project_id': parsed_args.project,
            'status': parsed_args.status,
        }

        restores = karbor.restores.list(
            search_opts=filters, marker=parsed_args.marker,
            limit=parsed_args.limit, sort=parsed_args.sort)

        headers = ['Id', 'Project id', 'Provider id', 'Checkpoint id',
                   'Restore target', 'Parameters', 'Status']

        # Pretty-print the nested parameters dict in its table cell.
        formatters = {
            "Parameters": functools.partial(
                jsonutils.dumps, indent=2, sort_keys=True),
        }
        rows = (osc_utils.get_item_properties(
            restore, headers, formatters=formatters)
            for restore in restores)
        return (headers, rows)
|
||||
|
||||
|
||||
class ShowRestore(command.ShowOne):
    """Detailed view of a single restore."""

    _description = "Shows restore details"

    def get_parser(self, prog_name):
        """Build the argument parser for the restore-show command."""
        parser = super(ShowRestore, self).get_parser(prog_name)
        parser.add_argument(
            'restore', metavar="<restore>",
            help=_('The UUID of the restore.'))
        return parser

    def take_action(self, parsed_args):
        """Look up the restore and render its properties."""
        karbor = self.app.client_manager.data_protection
        restore = osc_utils.find_resource(karbor.restores,
                                          parsed_args.restore)
        format_restore(restore._info)
        return zip(*sorted(restore._info.items()))
|
||||
|
||||
|
||||
class CreateRestore(command.ShowOne):
    """Create a restore from a checkpoint, optionally onto a remote target."""

    _description = "Creates a restore"

    def get_parser(self, prog_name):
        """Build the argument parser for the restore-create command."""
        parser = super(CreateRestore, self).get_parser(prog_name)
        parser.add_argument(
            'provider_id', metavar='<provider_id>',
            help=_('The UUID of the provider.'))
        parser.add_argument(
            'checkpoint_id', metavar='<checkpoint_id>',
            help=_('The UUID of the checkpoint.'))
        parser.add_argument(
            '--restore_target', metavar='<restore_target>',
            help=_('The target of the restore operation.'))
        parser.add_argument(
            '--restore_username', metavar='<restore_username>', default=None,
            help=_('Username to restore target.'))
        parser.add_argument(
            '--restore_password', metavar='<restore_password>', default=None,
            help=_('Password to restore target.'))
        parser.add_argument(
            '--parameters-json', type=str, dest='parameters_json',
            metavar='<parameters>', default=None,
            help=_('Restore parameters in json format.'))
        parser.add_argument(
            '--parameters', action='append',
            metavar='resource_type=<type>[,resource_id=<id>,key=val,...]',
            default=[],
            help=_("Restore parameters, may be specified multiple times. "
                   "resource_type: type of resource to apply parameters. "
                   "resource_id: limit the parameters to a specific resource. "
                   "Other keys and values: according to provider\'s "
                   "restore schema."))
        return parser

    @staticmethod
    def _build_restore_auth(parsed_args):
        """Validate target credentials and build the restore auth dict.

        Returns None when no restore target was requested; raises
        CommandError when a target is given without full credentials.
        """
        if parsed_args.restore_target is None:
            return None
        if parsed_args.restore_username is None:
            raise exceptions.CommandError(
                "Must specify username for restore_target.")
        if parsed_args.restore_password is None:
            raise exceptions.CommandError(
                "Must specify password for restore_target.")
        return {
            'type': 'password',
            'username': parsed_args.restore_username,
            'password': parsed_args.restore_password,
        }

    def take_action(self, parsed_args):
        """Validate the ids, create the restore and render its properties."""
        karbor = self.app.client_manager.data_protection
        if not uuidutils.is_uuid_like(parsed_args.provider_id):
            raise exceptions.CommandError(
                "Invalid provider id provided.")
        if not uuidutils.is_uuid_like(parsed_args.checkpoint_id):
            raise exceptions.CommandError(
                "Invalid checkpoint id provided.")

        parameters = utils.extract_parameters(parsed_args)
        restore_auth = self._build_restore_auth(parsed_args)
        restore = karbor.restores.create(parsed_args.provider_id,
                                         parsed_args.checkpoint_id,
                                         parsed_args.restore_target,
                                         parameters, restore_auth)
        format_restore(restore._info)
        return zip(*sorted(restore._info.items()))
|
@ -1,222 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 scheduled_operations action implementations"""
|
||||
|
||||
import functools
|
||||
import six
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
from karborclient.i18n import _
|
||||
|
||||
|
||||
def format_scheduledoperation(scheduledoperation_info):
    """Prepare a scheduled operation's info dict for display, in place.

    JSON-encodes the operation definition so it renders as readable text
    and drops the API 'links' entry, which is noise in CLI output.
    """
    if 'operation_definition' in scheduledoperation_info:
        scheduledoperation_info['operation_definition'] = jsonutils.dumps(
            scheduledoperation_info['operation_definition'],
            indent=2, sort_keys=True)
    scheduledoperation_info.pop("links", None)
|
||||
|
||||
|
||||
class ListScheduledOperations(command.Lister):
    """Tabular listing of scheduled operations."""

    _description = _("List scheduled_operations.")

    log = logging.getLogger(__name__ + ".ListScheduledOperations")

    def get_parser(self, prog_name):
        """Build the argument parser for the scheduled-operation-list command."""
        parser = super(ListScheduledOperations, self).get_parser(prog_name)
        parser.add_argument(
            '--all-projects',
            action='store_true',
            default=False,
            help=_('Shows details for all tenants. Admin only.'),
        )
        parser.add_argument(
            '--name',
            metavar='<name>',
            help=_('Filters results by a name. Default=None.'),
        )
        parser.add_argument(
            '--operation_type',
            metavar='<operation_type>',
            default=None,
            help=_('Filters results by a type. Default=None.'),
        )
        parser.add_argument(
            '--trigger_id',
            metavar='<trigger_id>',
            default=None,
            help=_('Filters results by a trigger id. Default=None.'),
        )
        parser.add_argument(
            '--operation_definition',
            metavar='<operation_definition>',
            default=None,
            help=_('Filters results by a operation definition. Default=None.'),
        )
        parser.add_argument(
            '--marker',
            metavar='<scheduled_operations>',
            help=_('The last scheduled_operations ID of the previous page'),
        )
        parser.add_argument(
            '--limit',
            type=int,
            metavar='<num-scheduled_operations>',
            help=_('Maximum number of scheduled_operations to display'),
        )
        parser.add_argument(
            '--sort',
            metavar="<key>[:<direction>]",
            default=None,
            help=_("Sort output by selected keys and directions(asc or desc) "
                   "(default: name:asc), multiple keys and directions can be "
                   "specified separated by comma"),
        )
        parser.add_argument(
            '--project',
            metavar='<project>',
            help=_('Filter results by a project(admin only)')
        )
        return parser

    def take_action(self, parsed_args):
        """Query the scheduled operations matching the filters and lay out
        the table.
        """
        self.log.debug("take_action(%s)", parsed_args)
        data_protection_client = self.app.client_manager.data_protection
        # Filtering on a specific project implies an all-projects query.
        all_projects = bool(parsed_args.project) or parsed_args.all_projects

        search_opts = {
            'all_tenants': all_projects,
            'project_id': parsed_args.project,
            'name': parsed_args.name,
            'operation_type': parsed_args.operation_type,
            'trigger_id': parsed_args.trigger_id,
            'operation_definition': parsed_args.operation_definition,
        }

        data = data_protection_client.scheduled_operations.list(
            search_opts=search_opts, marker=parsed_args.marker,
            limit=parsed_args.limit, sort=parsed_args.sort)

        column_headers = ['Id', 'Name', 'Operation Type', 'Trigger Id',
                          'Operation Definition']

        # Pretty-print the nested operation definition in its table cell.
        json_dumps = functools.partial(jsonutils.dumps,
                                       indent=2,
                                       sort_keys=True)
        formatters = {
            "Operation Definition": json_dumps,
        }
        # Return a generator rather than materializing a list, matching the
        # other Lister commands in this module; command.Lister consumes
        # either lazily.
        return (column_headers,
                (osc_utils.get_item_properties(
                    s, column_headers, formatters=formatters,
                ) for s in data))
|
||||
|
||||
|
||||
class ShowScheduledOperation(command.ShowOne):
    """Detailed view of a single scheduled operation."""

    _description = "Shows scheduled_operation details"

    def get_parser(self, prog_name):
        """Build the argument parser for the scheduled-operation-show
        command.
        """
        parser = super(ShowScheduledOperation, self).get_parser(prog_name)
        parser.add_argument(
            'scheduledoperation',
            metavar="<scheduledoperation>",
            help=_('The UUID of the scheduledoperation.')
        )
        return parser

    def take_action(self, parsed_args):
        """Look up the scheduled operation and render its properties."""
        client = self.app.client_manager.data_protection
        so = osc_utils.find_resource(client.scheduled_operations,
                                     parsed_args.scheduledoperation)

        format_scheduledoperation(so._info)
        # Use dict.items() directly instead of six.iteritems for
        # consistency with the other commands in this module; the client
        # targets Python 3.
        return zip(*sorted(so._info.items()))
|
||||
|
||||
|
||||
class CreateScheduledOperation(command.ShowOne):
    """Create a scheduled operation bound to an existing trigger."""

    _description = "Creates a scheduled operation"

    def get_parser(self, prog_name):
        """Build the argument parser for the scheduled-operation-create
        command.
        """
        parser = super(CreateScheduledOperation, self).get_parser(prog_name)
        parser.add_argument(
            'name',
            metavar='<name>',
            help=_('The name of the scheduled operation.')
        )
        parser.add_argument(
            'operation_type',
            metavar='<operation_type>',
            help=_('Operation Type of scheduled operation.')
        )
        parser.add_argument(
            'trigger_id',
            metavar='<trigger_id>',
            help=_('Trigger id of scheduled operation.')
        )
        parser.add_argument(
            'operation_definition',
            metavar='<key=value,key=value>',
            help=_('Operation definition of scheduled operation.')
        )
        return parser

    def take_action(self, parsed_args):
        """Validate the trigger id, create the scheduled operation and
        render its properties.
        """
        client = self.app.client_manager.data_protection
        if not uuidutils.is_uuid_like(parsed_args.trigger_id):
            raise exceptions.CommandError(
                "Invalid trigger id provided.")
        so = client.scheduled_operations.create(
            parsed_args.name, parsed_args.operation_type,
            parsed_args.trigger_id, parsed_args.operation_definition)

        format_scheduledoperation(so._info)
        # Use dict.items() directly instead of six.iteritems for
        # consistency with the other commands in this module; the client
        # targets Python 3.
        return zip(*sorted(so._info.items()))
|
||||
|
||||
|
||||
class DeleteScheduledOperation(command.Command):
    """Delete one or more scheduled operations by id."""

    _description = "Delete scheduled operation"

    def get_parser(self, prog_name):
        """Build the argument parser for the scheduled-operation-delete
        command.
        """
        parser = super(DeleteScheduledOperation, self).get_parser(prog_name)
        parser.add_argument(
            'scheduledoperation', metavar='<scheduledoperation>', nargs="+",
            help=_('ID of scheduled operation.'))
        return parser

    def take_action(self, parsed_args):
        """Delete each requested scheduled operation, tolerating misses.

        Only raises when none of the requested ids could be deleted.
        """
        karbor = self.app.client_manager.data_protection
        failures = 0
        for so_id in parsed_args.scheduledoperation:
            try:
                found = osc_utils.find_resource(
                    karbor.scheduled_operations, so_id)
                karbor.scheduled_operations.delete(found.id)
            except exceptions.NotFound:
                failures += 1
                print("Failed to delete '%s'; scheduled operation "
                      "not found" % so_id)
        if failures == len(parsed_args.scheduledoperation):
            raise exceptions.CommandError(
                "Unable to find and delete any of the "
                "specified scheduled operation.")
|
@ -1,99 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 os-services action implementations"""
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
|
||||
from karborclient.i18n import _
|
||||
|
||||
|
||||
class ListServices(command.Lister):
    """Tabular listing of karbor services."""

    _description = _("List services.")

    log = logging.getLogger(__name__ + ".ListServices")

    def get_parser(self, prog_name):
        """Build the argument parser for the service-list command."""
        parser = super(ListServices, self).get_parser(prog_name)
        parser.add_argument(
            '--host', metavar='<host>',
            help=_('Filter results by host'))
        parser.add_argument(
            '--binary', metavar='<binary>',
            help=_('Filter results by binary'))
        return parser

    def take_action(self, parsed_args):
        """Query the services matching the filters and lay out the table."""
        self.log.debug("take_action(%s)", parsed_args)
        karbor = self.app.client_manager.data_protection
        services = karbor.services.list(host=parsed_args.host,
                                        binary=parsed_args.binary)

        headers = ["Id", "Binary", "Host", "Status", "State",
                   "Updated_at", "Disabled Reason"]
        rows = (osc_utils.get_item_properties(service, headers)
                for service in services)
        return (headers, rows)
|
||||
|
||||
|
||||
class EnableService(command.ShowOne):
    """Mark a karbor service as enabled."""

    _description = _('Enable service')

    def get_parser(self, prog_name):
        """Build the argument parser for the service-enable command."""
        parser = super(EnableService, self).get_parser(prog_name)
        parser.add_argument(
            'service_id', metavar='<service_id>',
            help=_('The ID of the service.'))
        return parser

    def take_action(self, parsed_args):
        """Enable the service and render its updated properties."""
        karbor = self.app.client_manager.data_protection
        enabled = karbor.services.enable(parsed_args.service_id)
        return zip(*sorted(enabled._info.items()))
|
||||
|
||||
|
||||
class DisableService(command.ShowOne):
    """Mark a karbor service as disabled, optionally recording a reason."""

    _description = _('Disable service')

    def get_parser(self, prog_name):
        """Build the argument parser for the service-disable command."""
        parser = super(DisableService, self).get_parser(prog_name)
        parser.add_argument(
            'service_id', metavar='<service_id>',
            help=_('The ID of the service.'),
        )
        parser.add_argument(
            '--reason', metavar='<reason>',
            help=_('Reason for disabling the service.'))
        return parser

    def take_action(self, parsed_args):
        """Disable the service and render its updated properties.

        Uses the reason-logging API variant only when a reason was given.
        """
        karbor = self.app.client_manager.data_protection
        if parsed_args.reason:
            disabled = karbor.services.disable_log_reason(
                parsed_args.service_id, parsed_args.reason)
        else:
            disabled = karbor.services.disable(parsed_args.service_id)
        return zip(*sorted(disabled._info.items()))
|
@ -1,229 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 triggers action implementations"""
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
from karborclient.i18n import _
|
||||
from karborclient import utils
|
||||
|
||||
|
||||
class ListTriggers(command.Lister):
    """Tabular listing of triggers."""

    _description = _("List triggers.")

    log = logging.getLogger(__name__ + ".ListTriggers")

    def get_parser(self, prog_name):
        """Build the argument parser for the trigger-list command."""
        parser = super(ListTriggers, self).get_parser(prog_name)
        parser.add_argument(
            '--all-projects', action='store_true', default=False,
            help=_('Shows details for all tenants. Admin only.'))
        parser.add_argument(
            '--name', metavar='<name>', default=None,
            help=_('Filters results by a name. Default=None.'))
        parser.add_argument(
            '--type', metavar='<type>', default=None,
            help=_('Filters results by a type. Default=None.'))
        parser.add_argument(
            '--properties', metavar='<properties>', default=None,
            help=_('Filters results by a properties. Default=None.'))
        parser.add_argument(
            '--marker', metavar='<trigger>',
            help=_('The last trigger ID of the previous page'))
        parser.add_argument(
            '--limit', type=int, metavar='<num-triggers>',
            help=_('Maximum number of triggers to display'))
        parser.add_argument(
            '--sort', metavar="<key>[:<direction>]", default=None,
            help=_("Sort output by selected keys and directions(asc or desc) "
                   "(default: name:asc), multiple keys and directions can be "
                   "specified separated by comma"))
        parser.add_argument(
            '--project', metavar='<project>',
            help=_('Display information from single tenant (Admin only).'))
        return parser

    def take_action(self, parsed_args):
        """Query the triggers matching the filters and lay out the table."""
        self.log.debug("take_action(%s)", parsed_args)
        karbor = self.app.client_manager.data_protection
        # Filtering on a specific project implies an all-projects query.
        list_all = parsed_args.all_projects or bool(parsed_args.project)

        filters = {
            'all_tenants': list_all,
            'project_id': parsed_args.project,
            'name': parsed_args.name,
            'type': parsed_args.type,
            'properties': parsed_args.properties,
        }

        triggers = karbor.triggers.list(
            search_opts=filters, marker=parsed_args.marker,
            limit=parsed_args.limit, sort=parsed_args.sort)

        headers = ['Id', 'Name', 'Type', 'Properties']
        rows = (osc_utils.get_item_properties(trigger, headers)
                for trigger in triggers)
        return (headers, rows)
|
||||
|
||||
|
||||
class ShowTrigger(command.ShowOne):
    """Detailed view of a single trigger."""

    _description = "Shows trigger details"

    def get_parser(self, prog_name):
        """Build the argument parser for the trigger-show command."""
        parser = super(ShowTrigger, self).get_parser(prog_name)
        parser.add_argument(
            'trigger', metavar="<trigger>",
            help=_('The UUID of the trigger.'))
        return parser

    def take_action(self, parsed_args):
        """Look up the trigger and render its properties."""
        karbor = self.app.client_manager.data_protection
        trigger = osc_utils.find_resource(karbor.triggers,
                                          parsed_args.trigger)
        # The API 'links' entry is noise in CLI output.
        trigger._info.pop("links", None)
        return zip(*sorted(trigger._info.items()))
|
||||
|
||||
|
||||
class CreateTrigger(command.ShowOne):
    """Create a trigger."""

    _description = "Creates a trigger"

    def get_parser(self, prog_name):
        """Build the argument parser for the trigger-create command."""
        parser = super(CreateTrigger, self).get_parser(prog_name)
        parser.add_argument(
            'name', metavar='<name>',
            help=_('The name of the trigger.'))
        parser.add_argument(
            'type', metavar='<type>',
            help=_('Type of trigger.'))
        parser.add_argument(
            'properties', metavar='<key=value,key=value>',
            help=_('Properties of trigger.'))
        return parser

    def take_action(self, parsed_args):
        """Create the trigger and render its properties."""
        karbor = self.app.client_manager.data_protection
        trigger = karbor.triggers.create(parsed_args.name,
                                         parsed_args.type,
                                         parsed_args.properties)
        # The API 'links' entry is noise in CLI output.
        trigger._info.pop("links", None)
        return zip(*sorted(trigger._info.items()))
|
||||
|
||||
|
||||
class UpdateTrigger(command.ShowOne):
    """Rename a trigger and/or replace its properties."""

    _description = "Update a trigger"

    def get_parser(self, prog_name):
        """Build the argument parser for the trigger-update command."""
        parser = super(UpdateTrigger, self).get_parser(prog_name)
        parser.add_argument(
            "trigger_id", metavar="<TRIGGER ID>",
            help=_("Id of trigger to update."))
        parser.add_argument(
            "--name", metavar="<name>",
            help=_("A name to which the trigger will be renamed."))
        parser.add_argument(
            "--properties", metavar="<key=value,key=value>",
            help=_("Properties of trigger which will be updated."))
        return parser

    def take_action(self, parsed_args):
        """Apply the requested field updates and render the new trigger."""
        karbor = self.app.client_manager.data_protection
        # Only send the fields the user actually asked to change.
        updates = {}
        if parsed_args.name is not None:
            updates['name'] = parsed_args.name
        if parsed_args.properties is not None:
            updates['properties'] = utils.extract_properties(parsed_args)
        try:
            found = osc_utils.find_resource(karbor.triggers,
                                            parsed_args.trigger_id)
            trigger = karbor.triggers.update(found.id, updates)
        except exceptions.NotFound:
            raise exceptions.CommandError(
                "Trigger %s not found" % parsed_args.trigger_id)
        else:
            # The API 'links' entry is noise in CLI output.
            trigger._info.pop("links", None)
            return zip(*sorted(trigger._info.items()))
|
||||
|
||||
|
||||
class DeleteTrigger(command.Command):
    """Delete one or more triggers by id."""

    _description = "Delete trigger"

    log = logging.getLogger(__name__ + ".DeleteTrigger")

    def get_parser(self, prog_name):
        """Build the argument parser for the trigger-delete command."""
        parser = super(DeleteTrigger, self).get_parser(prog_name)
        parser.add_argument(
            'trigger', metavar='<trigger>', nargs="+",
            help=_('ID of trigger.'))
        return parser

    def take_action(self, parsed_args):
        """Delete each requested trigger, tolerating misses.

        Only raises when none of the requested ids could be deleted.
        """
        karbor = self.app.client_manager.data_protection
        failures = 0
        for trigger_id in parsed_args.trigger:
            try:
                found = osc_utils.find_resource(karbor.triggers, trigger_id)
                karbor.triggers.delete(found.id)
            except exceptions.NotFound:
                failures += 1
                self.log.error(
                    "Failed to delete '{0}'; trigger not found".
                    format(trigger_id))
        if failures == len(parsed_args.trigger):
            raise exceptions.CommandError(
                "Unable to find and delete any of the "
                "specified trigger.")
|
@ -1,184 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Data protection V1 verification action implementations"""
|
||||
|
||||
import functools
|
||||
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as osc_utils
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
from karborclient.i18n import _
|
||||
from karborclient import utils
|
||||
|
||||
|
||||
def format_verification(verification_info):
    """Prepare a verification's info dict for display, in place.

    JSON-encodes the nested dict fields so they render as readable text
    and drops the API 'links' entry, which is noise in CLI output.
    """
    json_keys = ('parameters', 'resources_status', 'resources_reason')
    for key in json_keys:
        if key in verification_info:
            verification_info[key] = jsonutils.dumps(
                verification_info[key], indent=2, sort_keys=True)
    verification_info.pop("links", None)
||||
|
||||
|
||||
class ListVerifications(command.Lister):
    """Tabular listing of verifications."""

    _description = _("List verifications.")

    log = logging.getLogger(__name__ + ".ListVerifications")

    def get_parser(self, prog_name):
        """Build the argument parser for the verification-list command."""
        parser = super(ListVerifications, self).get_parser(prog_name)
        parser.add_argument(
            '--all-projects', action='store_true', default=False,
            help=_('Include all projects (admin only)'))
        parser.add_argument(
            '--status', metavar='<status>',
            help=_('Filter results by status'))
        parser.add_argument(
            '--marker', metavar='<verification>',
            help=_('The last verification ID of the previous page'))
        parser.add_argument(
            '--limit', type=int, metavar='<num-verifications>',
            help=_('Maximum number of verifications to display'))
        parser.add_argument(
            '--sort', metavar="<key>[:<direction>]", default=None,
            help=_("Sort output by selected keys and directions(asc or desc), "
                   "multiple keys and directions can be "
                   "specified separated by comma"))
        parser.add_argument(
            '--project', metavar='<project>',
            help=_('Filter results by a project(admin only)'))
        return parser

    def take_action(self, parsed_args):
        """Query the verifications matching the filters and lay out the
        table.
        """
        self.log.debug("take_action(%s)", parsed_args)
        karbor = self.app.client_manager.data_protection
        # Filtering on a specific project implies an all-projects query.
        list_all = parsed_args.all_projects or bool(parsed_args.project)

        filters = {
            'all_tenants': list_all,
            'project_id': parsed_args.project,
            'status': parsed_args.status,
        }

        verifications = karbor.verifications.list(
            search_opts=filters, marker=parsed_args.marker,
            limit=parsed_args.limit, sort=parsed_args.sort)

        headers = ['Id', 'Project id', 'Provider id', 'Checkpoint id',
                   'Parameters', 'Status']

        # Pretty-print the nested parameters dict in its table cell.
        formatters = {
            "Parameters": functools.partial(
                jsonutils.dumps, indent=2, sort_keys=True),
        }
        rows = (osc_utils.get_item_properties(
            verification, headers, formatters=formatters)
            for verification in verifications)
        return (headers, rows)
|
||||
|
||||
|
||||
class ShowVerification(command.ShowOne):
    """Detailed view of a single verification."""

    _description = "Shows verification details"

    def get_parser(self, prog_name):
        """Build the argument parser for the verification-show command."""
        parser = super(ShowVerification, self).get_parser(prog_name)
        parser.add_argument(
            'verification', metavar="<verification>",
            help=_('The UUID of the verification.'))
        return parser

    def take_action(self, parsed_args):
        """Look up the verification and render its properties."""
        karbor = self.app.client_manager.data_protection
        verification = osc_utils.find_resource(karbor.verifications,
                                               parsed_args.verification)
        format_verification(verification._info)
        return zip(*sorted(verification._info.items()))
|
||||
|
||||
|
||||
class CreateVerification(command.ShowOne):
    """Create a verification for a checkpoint."""

    _description = "Creates a verification"

    def get_parser(self, prog_name):
        """Build the argument parser for the verification-create command."""
        parser = super(CreateVerification, self).get_parser(prog_name)
        parser.add_argument(
            'provider_id', metavar='<provider_id>',
            help=_('The UUID of the provider.'))
        parser.add_argument(
            'checkpoint_id', metavar='<checkpoint_id>',
            help=_('The UUID of the checkpoint.'))
        parser.add_argument(
            '--parameters-json', type=str, dest='parameters_json',
            metavar='<parameters>', default=None,
            help=_('Verification parameters in json format.'))
        parser.add_argument(
            '--parameters', action='append',
            metavar='resource_type=<type>[,resource_id=<id>,key=val,...]',
            default=[],
            help=_("Verification parameters, may be specified multiple times. "
                   "resource_type: type of resource to apply parameters. "
                   "resource_id: limit the parameters to a specific resource. "
                   "Other keys and values: according to provider\'s "
                   "verification schema."))
        return parser

    def take_action(self, parsed_args):
        """Validate the ids, create the verification and render its
        properties.
        """
        karbor = self.app.client_manager.data_protection
        if not uuidutils.is_uuid_like(parsed_args.provider_id):
            raise exceptions.CommandError(
                "Invalid provider id provided.")
        if not uuidutils.is_uuid_like(parsed_args.checkpoint_id):
            raise exceptions.CommandError(
                "Invalid checkpoint id provided.")

        parameters = utils.extract_parameters(parsed_args)
        verification = karbor.verifications.create(parsed_args.provider_id,
                                                   parsed_args.checkpoint_id,
                                                   parameters)
        format_verification(verification._info)
        return zip(*sorted(verification._info.items()))
|
@ -1,458 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Command-line interface to the karbor Project.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import copy
|
||||
import sys
|
||||
|
||||
from keystoneauth1 import discover
|
||||
from keystoneauth1 import exceptions as ks_exc
|
||||
from keystoneauth1.identity.generic import password
|
||||
from keystoneauth1.identity.generic import token
|
||||
from keystoneauth1 import loading
|
||||
from oslo_log import handlers
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import encodeutils
|
||||
from oslo_utils import importutils
|
||||
|
||||
import six
|
||||
import six.moves.urllib.parse as urlparse
|
||||
|
||||
import karborclient
|
||||
from karborclient import client as karbor_client
|
||||
from karborclient.common.apiclient import exceptions as exc
|
||||
from karborclient.common import utils
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class KarborShell(object):
    """Interactive command-line shell for the karbor data-protection API.

    Parses global options, discovers subcommands from the versioned shell
    module, authenticates via keystoneauth (or talks to karbor directly with
    --os-no-client-auth) and dispatches to the selected subcommand callback.
    """

    def _append_global_identity_args(self, parser, argv):
        """Register keystoneauth session/auth CLI options on *parser*.

        Peeks into *argv* to choose the default auth plugin: if a token
        option was supplied, default to the token plugin, else password.
        """
        loading.register_session_argparse_arguments(parser)
        # Peek into argv to see if os-auth-token (or the deprecated
        # os_auth_token) or the new os-token were given; in that case the
        # token auth plugin is what the user wants, else password.
        default_auth_plugin = 'password'
        # Bug fix: argv entries carry their leading dashes ('--os-token'),
        # so the bare option names alone could never match. Keep the bare
        # names for backward compatibility and add the dashed forms.
        token_opts = ['os-token', 'os-auth-token', 'os_auth-token',
                      '--os-token', '--os-auth-token', '--os_auth_token']
        if argv and any(i in token_opts for i in argv):
            default_auth_plugin = 'token'
        loading.register_auth_argparse_arguments(
            parser, argv, default=default_auth_plugin)

    def get_base_parser(self, argv):
        """Build the parser holding all global (pre-subcommand) options."""
        parser = argparse.ArgumentParser(
            prog='karbor',
            description=__doc__.strip(),
            epilog='See "karbor help COMMAND" '
                   'for help on a specific command.',
            add_help=False,
            formatter_class=HelpFormatter,
        )

        # Global arguments
        parser.add_argument('-h', '--help',
                            action='store_true',
                            help=argparse.SUPPRESS, )

        parser.add_argument('--version',
                            action='version',
                            version=karborclient.__version__,
                            help="Show program's version number and exit.")

        parser.add_argument('-d', '--debug',
                            default=bool(utils.env('KARBORCLIENT_DEBUG')),
                            action='store_true',
                            help='Defaults to env[KARBORCLIENT_DEBUG].')

        parser.add_argument('-v', '--verbose',
                            default=False, action="store_true",
                            help="Print more verbose output.")

        # os-cert, os-key, insecure, ca-file are all added
        # by keystone session register_cli_opts later
        parser.add_argument('--cert-file',
                            dest='os_cert',
                            help='DEPRECATED! Use --os-cert.')

        parser.add_argument('--key-file',
                            dest='os_key',
                            help='DEPRECATED! Use --os-key.')

        parser.add_argument('--ca-file',
                            dest='os_cacert',
                            help='DEPRECATED! Use --os-cacert.')

        parser.add_argument('--api-timeout',
                            help='Number of seconds to wait for an '
                                 'API response, '
                                 'defaults to system socket timeout.')

        parser.add_argument('--os_tenant_id',
                            default=utils.env('OS_TENANT_ID'),
                            help='Defaults to env[OS_TENANT_ID].')

        parser.add_argument('--os_tenant_name',
                            default=utils.env('OS_TENANT_NAME'),
                            help='Defaults to env[OS_TENANT_NAME].')

        parser.add_argument('--os-region-name',
                            default=utils.env('OS_REGION_NAME'),
                            help='Defaults to env[OS_REGION_NAME].')

        parser.add_argument('--os-auth-token',
                            default=utils.env('OS_AUTH_TOKEN'),
                            help='Defaults to env[OS_AUTH_TOKEN].')

        parser.add_argument('--os-no-client-auth',
                            default=utils.env('OS_NO_CLIENT_AUTH'),
                            action='store_true',
                            help="Do not contact keystone for a token. "
                                 "Defaults to env[OS_NO_CLIENT_AUTH].")

        parser.add_argument('--karbor-url',
                            default=utils.env('KARBOR_URL'),
                            help='Defaults to env[KARBOR_URL].')

        parser.add_argument('--karbor-api-version',
                            default=utils.env(
                                'KARBOR_API_VERSION', default='1'),
                            help='Defaults to env[KARBOR_API_VERSION] '
                                 'or 1.')

        parser.add_argument('--os-service-type',
                            default=utils.env('OS_SERVICE_TYPE'),
                            help='Defaults to env[OS_SERVICE_TYPE].')

        parser.add_argument('--os-endpoint-type',
                            default=utils.env('OS_ENDPOINT_TYPE'),
                            help='Defaults to env[OS_ENDPOINT_TYPE].')

        parser.add_argument('--include-password',
                            default=bool(utils.env('KARBOR_INCLUDE_PASSWORD')),
                            action='store_true',
                            help='Send os-username and os-password to karbor.')

        self._append_global_identity_args(parser, argv)

        return parser

    def get_subcommand_parser(self, version, argv=None):
        """Return the base parser extended with the version's subcommands."""
        parser = self.get_base_parser(argv)

        self.subcommands = {}
        subparsers = parser.add_subparsers(metavar='<subcommand>')
        # Subcommands live in karborclient.v<version>.shell as do_* callables.
        submodule = importutils.import_versioned_module(
            'karborclient', version, 'shell'
        )
        self._find_actions(subparsers, submodule)
        self._find_actions(subparsers, self)

        return parser

    def _find_actions(self, subparsers, actions_module):
        """Register every ``do_*`` attribute of *actions_module* as a
        subcommand; the first docstring line becomes its help text."""
        for attr in (a for a in dir(actions_module) if a.startswith('do_')):
            # CLI commands are hyphen-separated even though the implementing
            # callables use underscores.
            command = attr[3:].replace('_', '-')
            callback = getattr(actions_module, attr)
            desc = callback.__doc__ or ''
            # Renamed from 'help' to avoid shadowing the builtin.
            action_help = desc.strip().split('\n')[0]
            arguments = getattr(callback, 'arguments', [])

            subparser = subparsers.add_parser(command, help=action_help,
                                              description=desc,
                                              add_help=False,
                                              formatter_class=HelpFormatter)
            subparser.add_argument('-h', '--help', action='help',
                                   help=argparse.SUPPRESS)
            self.subcommands[command] = subparser
            for (args, kwargs) in arguments:
                subparser.add_argument(*args, **kwargs)
            subparser.set_defaults(func=callback)

    def _discover_auth_versions(self, session, auth_url):
        """Return ``(v2_auth_url, v3_auth_url)`` supported at *auth_url*.

        Falls back to inspecting the URL path when the identity service
        does not support version discovery; raises CommandError when the
        version cannot be determined at all.
        """
        v2_auth_url = None
        v3_auth_url = None
        try:
            ks_discover = discover.Discover(session=session, url=auth_url)
            v2_auth_url = ks_discover.url_for('2.0')
            v3_auth_url = ks_discover.url_for('3.0')
        except ks_exc.ClientException as e:
            # Identity service may not support discover API version.
            # Try to figure out the API version from the original URL.
            url_parts = urlparse.urlparse(auth_url)
            (scheme, netloc, path, params, query, fragment) = url_parts
            path = path.lower()
            if path.startswith('/v3'):
                v3_auth_url = auth_url
            elif path.startswith('/v2'):
                v2_auth_url = auth_url
            else:
                # not enough information to determine the auth version
                msg = ('Unable to determine the Keystone version '
                       'to authenticate with using the given '
                       'auth_url. Identity service may not support API '
                       'version discovery. Please provide a versioned '
                       'auth_url instead. error=%s') % (e)
                raise exc.CommandError(msg)

        return (v2_auth_url, v3_auth_url)

    def _get_keystone_auth(self, session, auth_url, **kwargs):
        """Build a keystoneauth auth plugin from the parsed CLI options.

        Prefers a token plugin when a token was supplied, otherwise a
        password plugin against whichever keystone version is available.
        """
        auth_token = kwargs.pop('auth_token', None)
        if auth_token:
            return token.Token(
                auth_url,
                auth_token,
                project_id=kwargs.pop('project_id'),
                project_name=kwargs.pop('project_name'),
                project_domain_id=kwargs.pop('project_domain_id'),
                project_domain_name=kwargs.pop('project_domain_name'))

        # NOTE(starodubcevna): this is a workaround for the bug:
        # https://bugs.launchpad.net/python-openstackclient/+bug/1447704
        # Change that fix this error in keystoneclient was abandoned,
        # so we should use workaround until we move to keystoneauth.
        # The idea of the code came from glanceclient.

        (v2_auth_url, v3_auth_url) = self._discover_auth_versions(
            session=session,
            auth_url=auth_url)

        if v3_auth_url:
            # NOTE(starodubcevna): set user_domain_id and project_domain_id
            # to default as it done in other projects.
            return password.Password(auth_url,
                                     username=kwargs.pop('username'),
                                     user_id=kwargs.pop('user_id'),
                                     password=kwargs.pop('password'),
                                     user_domain_id=kwargs.pop(
                                         'user_domain_id') or 'default',
                                     user_domain_name=kwargs.pop(
                                         'user_domain_name'),
                                     project_id=kwargs.pop('project_id'),
                                     project_name=kwargs.pop('project_name'),
                                     project_domain_id=kwargs.pop(
                                         'project_domain_id') or 'default')
        elif v2_auth_url:
            return password.Password(auth_url,
                                     username=kwargs.pop('username'),
                                     user_id=kwargs.pop('user_id'),
                                     password=kwargs.pop('password'),
                                     project_id=kwargs.pop('project_id'),
                                     project_name=kwargs.pop('project_name'))
        else:
            # if we get here it means domain information is provided
            # (caller meant to use Keystone V3) but the auth url is
            # actually Keystone V2. Obviously we can't authenticate a V3
            # user using V2.
            # Bug fix: the exception was instantiated but never raised,
            # so the method silently fell through and returned None.
            raise exc.CommandError(
                "Credential and auth_url mismatch. The given "
                "auth_url is using Keystone V2 endpoint, which "
                "may not able to handle Keystone V3 credentials. "
                "Please provide a correct Keystone V3 auth_url.")

    def _setup_logging(self, debug):
        """Route oslo.log output to stdout; DEBUG level when *debug* set."""
        # Output the logs to command-line interface
        color_handler = handlers.ColorHandler(sys.stdout)
        logger_root = logging.getLogger(None).logger
        logger_root.level = logging.DEBUG if debug else logging.WARNING
        logger_root.addHandler(color_handler)

        # Quiet down chatty third-party libraries.
        logging.getLogger('iso8601') \
            .logger.setLevel(logging.WARNING)
        logging.getLogger('urllib3.connectionpool') \
            .logger.setLevel(logging.WARNING)

    def main(self, argv):
        """Parse *argv*, authenticate, and dispatch the chosen subcommand."""
        # Parse args once to find version
        base_argv = copy.deepcopy(argv)
        parser = self.get_base_parser(argv)
        (options, args) = parser.parse_known_args(base_argv)
        self._setup_logging(options.debug)

        # build available subcommands based on version
        api_version = options.karbor_api_version
        subcommand_parser = self.get_subcommand_parser(api_version, argv)
        self.parser = subcommand_parser

        ks_session = None
        keystone_auth = None

        # Handle top-level --help/-h before attempting to parse
        # a command off the command line.
        if (not args and options.help) or not argv:
            self.do_help(options)
            return 0

        # Parse args again and call whatever callback was selected.
        args = subcommand_parser.parse_args(argv)

        # Short-circuit and deal with help command right away.
        if args.func == self.do_help:
            self.do_help(args)
            return 0
        elif args.func == self.do_bash_completion:
            self.do_bash_completion(args)
            return 0

        if not args.os_username and not args.os_auth_token:
            raise exc.CommandError("You must provide a username via"
                                   " either --os-username or env[OS_USERNAME]"
                                   " or a token via --os-auth-token or"
                                   " env[OS_AUTH_TOKEN]")

        if args.os_no_client_auth:
            if not args.karbor_url:
                raise exc.CommandError(
                    "If you specify --os-no-client-auth"
                    " you must also specify a Karbor API URL"
                    " via either --karbor-url or env[KARBOR_URL]")

        else:
            # Tenant name or ID is needed to make keystoneclient retrieve a
            # service catalog, it's not required if os_no_client_auth is
            # specified, neither is the auth URL.
            if not any([args.os_tenant_name, args.os_tenant_id,
                        args.os_project_id, args.os_project_name]):
                raise exc.CommandError("You must provide a project name or"
                                       " project id via --os-project-name,"
                                       " --os-project-id, env[OS_PROJECT_ID]"
                                       " or env[OS_PROJECT_NAME]. You may"
                                       " use os-project and os-tenant"
                                       " interchangeably.")
            if not args.os_auth_url:
                raise exc.CommandError("You must provide an auth url via"
                                       " either --os-auth-url or via"
                                       " env[OS_AUTH_URL]")

        endpoint = args.karbor_url

        if args.os_no_client_auth:
            # Authenticate through karbor, don't use session
            kwargs = {
                'username': args.os_username,
                'password': args.os_password,
                'auth_token': args.os_auth_token,
                'auth_url': args.os_auth_url,
                'token': args.os_auth_token,
                'insecure': args.insecure,
                'timeout': args.api_timeout
            }

            if args.os_region_name:
                kwargs['region_name'] = args.os_region_name
        else:
            # Create a keystone session and keystone auth
            ks_session = loading.load_session_from_argparse_arguments(args)
            # The os-project-* options take precedence over the deprecated
            # os-tenant-* spellings.
            project_id = args.os_project_id or args.os_tenant_id
            project_name = args.os_project_name or args.os_tenant_name

            keystone_auth = self._get_keystone_auth(
                ks_session,
                args.os_auth_url,
                username=args.os_username,
                user_id=args.os_user_id,
                user_domain_id=args.os_user_domain_id,
                user_domain_name=args.os_user_domain_name,
                password=args.os_password,
                auth_token=args.os_auth_token,
                project_id=project_id,
                project_name=project_name,
                project_domain_id=args.os_project_domain_id,
                project_domain_name=args.os_project_domain_name)

            endpoint_type = args.os_endpoint_type or 'publicURL'
            service_type = args.os_service_type or 'data-protect'

            endpoint = keystone_auth.get_endpoint(
                ks_session,
                service_type=service_type,
                region_name=args.os_region_name)

            kwargs = {
                'session': ks_session,
                'auth': keystone_auth,
                'service_type': service_type,
                'endpoint_type': endpoint_type,
                'region_name': args.os_region_name,
            }

        if args.api_timeout:
            kwargs['timeout'] = args.api_timeout

        self.cs = karbor_client.Client(api_version, endpoint, **kwargs)

        args.func(self.cs, args)

    def do_bash_completion(self, args):
        """Prints all of the commands and options to stdout."""
        commands = set()
        options = set()
        for sc_str, sc in self.subcommands.items():
            commands.add(sc_str)
            for option in list(sc._optionals._option_string_actions):
                options.add(option)

        commands.remove('bash-completion')
        print(' '.join(commands | options))

    @utils.arg('command', metavar='<subcommand>', nargs='?',
               help='Display help for <subcommand>')
    def do_help(self, args):
        """Display help about this program or one of its subcommands.

        """
        if getattr(args, 'command', None):
            if args.command in self.subcommands:
                self.subcommands[args.command].print_help()
            else:
                msg = "'%s' is not a valid subcommand"
                raise exc.CommandError(msg % args.command)
        else:
            self.parser.print_help()
|
||||
|
||||
|
||||
class HelpFormatter(argparse.HelpFormatter):
    """argparse help formatter that capitalizes section headings."""

    def start_section(self, heading):
        # Upper-case only the first character; leave the rest untouched.
        capitalized = heading[0].upper() + heading[1:]
        super(HelpFormatter, self).start_section(capitalized)
|
||||
|
||||
|
||||
def main(args=None):
    """CLI entry point: run KarborShell with *args*.

    Defaults to ``sys.argv[1:]``. Exits with status 1 on interrupt or
    error; re-raises the original exception when --debug/-d is given.
    """
    # Bug fix: the old default ``args=sys.argv[1:]`` was evaluated once at
    # import time, so later changes to sys.argv (e.g. by a test harness or
    # embedding application) were silently ignored. Bind it lazily instead.
    if args is None:
        args = sys.argv[1:]
    try:
        KarborShell().main(args)

    except KeyboardInterrupt:
        print('... terminating karbor client', file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        # With --debug, surface the full traceback for diagnosis.
        if '--debug' in args or '-d' in args:
            raise
        else:
            print(encodeutils.safe_encode(six.text_type(e)), file=sys.stderr)
            sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -1,63 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
|
||||
import fixtures
|
||||
import requests
|
||||
import testtools
|
||||
|
||||
|
||||
class TestCaseShell(testtools.TestCase):
    """Base test case that optionally captures stdout/stderr via fixtures."""

    # Keyword defaults expected on every fake HTTP request.
    TEST_REQUEST_BASE = {
        'verify': True,
    }

    def _capture_stream(self, name):
        # Redirect sys.<name> into an in-memory stream for this test.
        stream = self.useFixture(fixtures.StringStream(name)).stream
        self.useFixture(fixtures.MonkeyPatch('sys.%s' % name, stream))

    def setUp(self):
        super(TestCaseShell, self).setUp()
        # The OS_*_CAPTURE environment variables (set by .testr.conf) decide
        # whether the streams are captured; both 'True' and '1' enable it.
        if os.environ.get('OS_STDOUT_CAPTURE') in ('True', '1'):
            self._capture_stream('stdout')
        if os.environ.get('OS_STDERR_CAPTURE') in ('True', '1'):
            self._capture_stream('stderr')
|
||||
|
||||
|
||||
class TestResponse(requests.Response):
    """requests.Response wrapper that can be initialized from a dict.

    Accepts either a bare status code or a dict carrying ``status_code``,
    ``headers``, ``reason`` and ``text`` keys; the ``text`` value is served
    through the overridden :attr:`text` property.
    """

    def __init__(self, data):
        # Bug fix: the original evaluated ``super(TestResponse, self)``
        # without calling ``__init__()``, so requests.Response was never
        # initialized and its attributes were missing.
        super(TestResponse, self).__init__()
        self._text = None
        if isinstance(data, dict):
            self.status_code = data.get('status_code')
            self.headers = data.get('headers')
            self.reason = data.get('reason', '')
            # Fake the text attribute to streamline Response creation
            self._text = data.get('text')
        else:
            self.status_code = data

    def __eq__(self, other):
        # Structural equality over all instance attributes.
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def text(self):
        # Bypass requests' content decoding; return the injected text as-is.
        return self._text
|
@ -1,135 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
|
||||
class FakeHTTPResponse(object):
    """Minimal stand-in for an HTTP response object used by the fakes."""

    # Protocol version attribute mirroring real response objects.
    version = 1.1

    def __init__(self, status_code, reason, headers, content):
        self.status_code = status_code
        self.reason = reason
        self.headers = headers
        self.content = content
        self.raw = FakeRaw()

    def getheader(self, name, default=None):
        """Return a single header value, or *default* when absent."""
        return self.headers.get(name, default)

    def getheaders(self):
        """Return all headers as (name, value) pairs."""
        return self.headers.items()

    def read(self, amt=None):
        """Return the body once; subsequent reads yield None (EOF)."""
        body, self.content = self.content, None
        return body

    def iter_content(self, chunksize):
        """Return the raw body (chunking is not simulated)."""
        return self.content

    def json(self):
        """Decode the body as JSON."""
        return jsonutils.loads(self.content)
|
||||
|
||||
|
||||
class FakeRaw(object):
    # Mimics the ``raw`` attribute of a real HTTP response object.
    # NOTE(review): 110 matches httplib's encoding of HTTP/1.1 — confirm
    # against whatever consumes ``raw.version``.
    version = 110
|
||||
|
||||
|
||||
class FakeClient(object):
    """Base for fake API clients that record calls on ``self.client.callstack``
    and provide assertion helpers over that recorded call history."""

    def _dict_match(self, partial, real):
        """Return True when every key/value in *partial* is present in *real*,
        recursing into nested dicts; missing keys or mismatches give False.
        """
        try:
            for key, value in partial.items():
                if isinstance(value, dict):
                    # Bug fix: a nested mismatch used to set result=False but
                    # the loop kept going, and a later matching key overwrote
                    # it with True. Fail immediately instead.
                    if not self._dict_match(value, real[key]):
                        return False
                elif real[key] != value:
                    return False
        except KeyError:
            return False
        return True

    def assert_called(self, method, url, body=None,
                      partial_body=None, pos=-1, **kwargs):
        """Assert than an API method was just called.
        """
        expected = (method, url)

        # Bug fix: check for an empty callstack *before* indexing it;
        # previously an empty callstack raised IndexError instead of the
        # intended assertion message.
        assert self.client.callstack, ("Expected %s %s but no calls "
                                       "were made." % expected)

        called = self.client.callstack[pos][0:2]

        assert expected == called, 'Expected %s %s; got %s %s' % (
            expected + called)

        if body is not None:
            assert self.client.callstack[pos][2] == body

        if partial_body is not None:
            try:
                assert self._dict_match(partial_body,
                                        self.client.callstack[pos][2])
            except AssertionError:
                # Print the mismatch for easier test debugging, then re-raise.
                print(self.client.callstack[pos][2])
                print("does not contain")
                print(partial_body)
                raise

    def assert_called_anytime(self, method, url, body=None, partial_body=None):
        """Assert than an API method was called anytime in the test.
        """
        expected = (method, url)

        assert self.client.callstack, ("Expected %s %s but no calls "
                                       "were made." % expected)

        found = False
        for entry in self.client.callstack:
            if expected == entry[0:2]:
                found = True
                break

        assert found, 'Expected %s %s; got %s' % (
            expected + (self.client.callstack, ))

        if body is not None:
            try:
                # ``entry`` is the first matching call from the loop above.
                assert entry[2] == body
            except AssertionError:
                print(entry[2])
                print("!=")
                print(body)
                raise

        if partial_body is not None:
            try:
                assert self._dict_match(partial_body, entry[2])
            except AssertionError:
                print(entry[2])
                print("does not contain")
                print(partial_body)
                raise

    def clear_callstack(self):
        """Forget all recorded calls."""
        self.client.callstack = []

    def authenticate(self):
        """No-op: fakes never authenticate."""
        pass
|
@ -1 +0,0 @@
|
||||
__author__ = 'c00179918'
|
@ -1,29 +0,0 @@
|
||||
# Copyright (c) 2015 Mirantis Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import mock
|
||||
from osc_lib.tests import utils
|
||||
|
||||
|
||||
class TestDataProtection(utils.TestCommand):
    """Base osc test class wiring mocked service clients onto the app."""

    def setUp(self):
        super(TestDataProtection, self).setUp()

        # Every service endpoint the commands may touch gets a fresh mock.
        manager = self.app.client_manager
        for service in ('data_protection', 'network', 'compute', 'volume'):
            setattr(manager, service, mock.Mock())
@ -1,254 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from karborclient.osc.v1 import checkpoints as osc_checkpoints
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import checkpoints
|
||||
|
||||
|
||||
# Canned checkpoint payloads mirroring what the karbor API returns; used as
# fixtures throughout the checkpoint command tests. The resource_graph value
# is a JSON-encoded string, matching the API's wire format.
CHECKPOINT_INFO = {
    "id": "dcb20606-ad71-40a3-80e4-ef0fafdad0c3",
    "project_id": "e486a2f49695423ca9c47e589b948108",
    "status": "available",
    "protection_plan": {
        "id": "3523a271-68aa-42f5-b9ba-56e5200a2ebb",
        "name": "My application",
        "provider_id": "cf56bd3e-97a7-4078-b6d5-f36246333fd9",
        "resources": [{
            "id": "99777fdd-8a5b-45ab-ba2c-52420008103f",
            "type": "OS::Glance::Image",
            "name": "cirros-0.3.4-x86_64-uec"}]
    },
    "resource_graph": jsonutils.dumps(
        "[{'0x0': ['OS::Glance::Image', "
        "'99777fdd-8a5b-45ab-ba2c-52420008103f', "
        "'cirros-0.3.4-x86_64-uec']}, [[['0x0']]]]"
    ),
}

# Second checkpoint owned by a different project; exercised by the
# --all-projects listing test.
CHECKPOINT_INFO_2 = {
    "id": "a6fd95fe-0892-43b2-ad3c-e56f3a1b86b8",
    "project_id": "79b35e99a6a541b3bcede40f590d6878",
    "status": "available",
    "protection_plan": {
        "id": "3b47fd5d-21f9-4e63-8409-0acb1bffc038",
        "name": "My application",
        "provider_id": "cf56bd3e-97a7-4078-b6d5-f36246333fd9",
        "resources": [{
            "id": "99777fdd-8a5b-45ab-ba2c-52420008103f",
            "type": "OS::Glance::Image",
            "name": "cirros-0.3.4-x86_64-uec"}]
    },
    "resource_graph": jsonutils.dumps(
        "[{'0x0': ['OS::Glance::Image', "
        "'99777fdd-8a5b-45ab-ba2c-52420008103f', "
        "'cirros-0.3.4-x86_64-uec']}, [[['0x0']]]]"
    ),
}
|
||||
|
||||
|
||||
class TestCheckpoints(fakes.TestDataProtection):
    """Shared setup: expose and reset the mocked checkpoints manager."""

    def setUp(self):
        super(TestCheckpoints, self).setUp()
        checkpoints_mock = (
            self.app.client_manager.data_protection.checkpoints)
        # Start each test from a clean call history.
        checkpoints_mock.reset_mock()
        self.checkpoints_mock = checkpoints_mock
|
||||
|
||||
|
||||
class TestListCheckpoints(TestCheckpoints):
    """Tests for the ``checkpoint list`` osc command."""

    def setUp(self):
        super(TestListCheckpoints, self).setUp()

        # Command to test
        self.cmd = osc_checkpoints.ListCheckpoints(self.app, None)

    def test_checkpoints_list(self):
        """Listing one checkpoint yields the expected columns and row."""
        self.checkpoints_mock.list.return_value = [checkpoints.Checkpoint(
            None, copy.deepcopy(CHECKPOINT_INFO))]
        arglist = ['cf56bd3e-97a7-4078-b6d5-f36246333fd9']
        verifylist = [('provider_id', 'cf56bd3e-97a7-4078-b6d5-f36246333fd9')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = (
            ['Id', 'Project id', 'Status', 'Protection plan', 'Metadata',
             'Created at'])
        self.assertEqual(expected_columns, columns)

        # Check that data is correct; the plan cell is rendered as a
        # "Name: ...\nId: ..." summary, and the fixture has no metadata
        # or created_at, hence the trailing empty strings.
        expected_data = [(
            "dcb20606-ad71-40a3-80e4-ef0fafdad0c3",
            "e486a2f49695423ca9c47e589b948108",
            "available",
            "Name: %(name)s\nId: %(id)s" % {
                "id": "3523a271-68aa-42f5-b9ba-56e5200a2ebb",
                "name": "My application",
            },
            '',
            '')]
        self.assertEqual(expected_data, list(data))

    def test_checkpoints_list_with_all_projects(self):
        """--all-projects returns checkpoints from both projects."""
        self.checkpoints_mock.list.return_value = [checkpoints.Checkpoint(
            None, copy.deepcopy(CHECKPOINT_INFO)), checkpoints.Checkpoint(
            None, copy.deepcopy(CHECKPOINT_INFO_2))]
        arglist = ['cf56bd3e-97a7-4078-b6d5-f36246333fd9', '--all-projects']
        verifylist = [('provider_id', 'cf56bd3e-97a7-4078-b6d5-f36246333fd9'),
                      ('all_projects', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        expected_columns = (
            ['Id', 'Project id', 'Status', 'Protection plan', 'Metadata',
             'Created at'])
        self.assertEqual(expected_columns, columns)

        expected_data = [(
            "dcb20606-ad71-40a3-80e4-ef0fafdad0c3",
            "e486a2f49695423ca9c47e589b948108",
            "available",
            "Name: %(name)s\nId: %(id)s" % {
                "id": "3523a271-68aa-42f5-b9ba-56e5200a2ebb",
                "name": "My application",
            },
            '',
            ''), (
            "a6fd95fe-0892-43b2-ad3c-e56f3a1b86b8",
            "79b35e99a6a541b3bcede40f590d6878",
            "available",
            "Name: %(name)s\nId: %(id)s" % {
                "id": "3b47fd5d-21f9-4e63-8409-0acb1bffc038",
                "name": "My application",
            },
            '',
            '')
        ]
        self.assertEqual(expected_data, list(data))
|
||||
|
||||
|
||||
class TestCreateCheckpoint(TestCheckpoints):
    """Tests for the ``checkpoint create`` osc command."""

    def setUp(self):
        super(TestCreateCheckpoint, self).setUp()
        self.checkpoints_mock.create.return_value = checkpoints.Checkpoint(
            None, copy.deepcopy(CHECKPOINT_INFO))
        # Command to test
        self.cmd = osc_checkpoints.CreateCheckpoint(self.app, None)

    def test_checkpoint_create(self):
        """create() is called with provider id, plan id and no extra args."""
        arglist = ['cf56bd3e-97a7-4078-b6d5-f36246333fd9',
                   '3523a271-68aa-42f5-b9ba-56e5200a2ebb']
        verifylist = [('provider_id', 'cf56bd3e-97a7-4078-b6d5-f36246333fd9'),
                      ('plan_id', '3523a271-68aa-42f5-b9ba-56e5200a2ebb')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.checkpoints_mock.create.assert_called_once_with(
            'cf56bd3e-97a7-4078-b6d5-f36246333fd9',
            '3523a271-68aa-42f5-b9ba-56e5200a2ebb',
            None)
|
||||
|
||||
|
||||
class TestShowCheckpoint(TestCheckpoints):
    """Tests for the ``checkpoint show`` osc command."""

    def setUp(self):
        super(TestShowCheckpoint, self).setUp()
        self.checkpoints_mock.get.return_value = checkpoints.Checkpoint(
            None, copy.deepcopy(CHECKPOINT_INFO))
        # Command to test
        self.cmd = osc_checkpoints.ShowCheckpoint(self.app, None)

    def test_checkpoint_show(self):
        """get() is called with the provider id and checkpoint id."""
        arglist = ['cf56bd3e-97a7-4078-b6d5-f36246333fd9',
                   'dcb20606-ad71-40a3-80e4-ef0fafdad0c3']
        verifylist = [('provider_id', 'cf56bd3e-97a7-4078-b6d5-f36246333fd9'),
                      ('checkpoint_id',
                       'dcb20606-ad71-40a3-80e4-ef0fafdad0c3')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.checkpoints_mock.get.assert_called_once_with(
            'cf56bd3e-97a7-4078-b6d5-f36246333fd9',
            'dcb20606-ad71-40a3-80e4-ef0fafdad0c3')
|
||||
|
||||
|
||||
class TestDeleteCheckpoint(TestCheckpoints):
    """Tests for the ``checkpoint delete`` osc command."""

    def setUp(self):
        super(TestDeleteCheckpoint, self).setUp()
        self.checkpoints_mock.get.return_value = checkpoints.Checkpoint(
            None, copy.deepcopy(CHECKPOINT_INFO))
        # Command to test
        self.cmd = osc_checkpoints.DeleteCheckpoint(self.app, None)

    def test_checkpoint_delete(self):
        """delete() is called once for the single requested checkpoint.

        Note the 'checkpoint' arg parses as a list: the command accepts
        multiple checkpoint ids.
        """
        arglist = ['cf56bd3e-97a7-4078-b6d5-f36246333fd9',
                   'dcb20606-ad71-40a3-80e4-ef0fafdad0c3']
        verifylist = [('provider_id', 'cf56bd3e-97a7-4078-b6d5-f36246333fd9'),
                      ('checkpoint',
                       ['dcb20606-ad71-40a3-80e4-ef0fafdad0c3'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.checkpoints_mock.delete.assert_called_once_with(
            'cf56bd3e-97a7-4078-b6d5-f36246333fd9',
            'dcb20606-ad71-40a3-80e4-ef0fafdad0c3')
|
||||
|
||||
|
||||
class TestResetCheckpointState(TestCheckpoints):
    """Tests for the ``checkpoint reset-state`` OSC command."""

    def setUp(self):
        super(TestResetCheckpointState, self).setUp()
        self.cmd = osc_checkpoints.ResetCheckpointState(self.app, None)

    def _run_reset(self, extra_args, expected_state):
        """Parse, run, and verify one reset-state invocation."""
        provider = 'cf56bd3e-97a7-4078-b6d5-f36246333fd9'
        checkpoint = 'dcb20606-ad71-40a3-80e4-ef0fafdad0c3'
        parsed = self.check_parser(
            self.cmd,
            [provider, checkpoint] + extra_args,
            [('provider_id', provider),
             ('checkpoint', [checkpoint]),
             ('state', expected_state)])

        self.cmd.take_action(parsed)

        self.checkpoints_mock.reset_state.assert_called_once_with(
            provider, checkpoint, expected_state)

    def test_reset_checkpoint_with_default_state(self):
        # No state flag given: the parser defaults to 'error'.
        self._run_reset([], 'error')

    def test_reset_checkpoint(self):
        self._run_reset(['--available'], 'available')
|
@ -1,128 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from karborclient.osc.v1 import operation_logs as osc_operation_logs
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import operation_logs
|
||||
|
||||
|
||||
# Canned operation-log payload shaped like a karbor API response.
# Tests deep-copy this dict so individual cases cannot leak mutations.
OPERATIONLOG_INFO = {
    "id": "22b82aa7-9179-4c71-bba2-caf5c0e68db7",
    "project_id": "e486a2f49695423ca9c47e589b948108",
    "operation_type": "protect",
    "checkpoint_id": "dcb20606-ad71-40a3-80e4-ef0fafdad0c3",
    "plan_id": "cf56bd3e-97a7-4078-b6d5-f36246333fd9",
    "provider_id": "23902b02-5666-4ee6-8dfe-962ac09c3994",
    "restore_id": None,
    "scheduled_operation_id": "23902b02-5666-4ee6-8dfe-962ac09c3991",
    "started_at": "2015-08-27T09:50:58-05:00",
    "ended_at": "2015-08-27T10:50:58-05:00",
    "status": "protecting",
    "error_info": "Could not access bank",
    "extra_info": None
}
|
||||
|
||||
|
||||
class TestOperationLogs(fakes.TestDataProtection):
    """Common fixture exposing the mocked operation-log manager."""

    def setUp(self):
        super(TestOperationLogs, self).setUp()
        manager = self.app.client_manager.data_protection.operation_logs
        manager.reset_mock()
        self.operation_logs_mock = manager
|
||||
|
||||
|
||||
class TestListOperationLogs(TestOperationLogs):
    """Tests for the ``operationlog list`` OSC command."""

    def setUp(self):
        super(TestListOperationLogs, self).setUp()
        self.operation_logs_mock.list.return_value = [
            operation_logs.OperationLog(
                None, copy.deepcopy(OPERATIONLOG_INFO))]
        # Command under test.
        self.cmd = osc_operation_logs.ListOperationLogs(self.app, None)

    def test_operation_logs_list(self):
        parsed = self.check_parser(
            self.cmd, ['--status', 'success'], [('status', 'success')])

        columns, rows = self.cmd.take_action(parsed)

        # Column headers come back in a fixed order.
        self.assertEqual(
            ['Id', 'Operation Type', 'Checkpoint id', 'Plan Id',
             'Provider id', 'Restore Id', 'Scheduled Operation Id',
             'Status', 'Started At', 'Ended At', 'Error Info',
             'Extra Info'],
            columns)

        # One row per OperationLog, values in column order.
        self.assertEqual(
            [("22b82aa7-9179-4c71-bba2-caf5c0e68db7",
              "protect",
              "dcb20606-ad71-40a3-80e4-ef0fafdad0c3",
              "cf56bd3e-97a7-4078-b6d5-f36246333fd9",
              "23902b02-5666-4ee6-8dfe-962ac09c3994",
              None,
              "23902b02-5666-4ee6-8dfe-962ac09c3991",
              "protecting",
              "2015-08-27T09:50:58-05:00",
              "2015-08-27T10:50:58-05:00",
              "Could not access bank",
              None)],
            list(rows))
|
||||
|
||||
|
||||
class TestShowOperationLog(TestOperationLogs):
    """Tests for the ``operationlog show`` OSC command."""

    def setUp(self):
        super(TestShowOperationLog, self).setUp()
        # Keep a handle on the payload so assertions can read it back.
        self._oplog_info = copy.deepcopy(OPERATIONLOG_INFO)
        self.operation_logs_mock.get.return_value = (
            operation_logs.OperationLog(None, self._oplog_info))

        # Command under test.
        self.cmd = osc_operation_logs.ShowOperationLog(self.app, None)

    def test_operation_log_show(self):
        log_id = '22b82aa7-9179-4c71-bba2-caf5c0e68db7'
        parsed = self.check_parser(
            self.cmd, [log_id], [('operation_log', log_id)])

        columns, data = self.cmd.take_action(parsed)

        expected = ('checkpoint_id', 'ended_at', 'error_info', 'extra_info',
                    'id', 'operation_type', 'plan_id', 'project_id',
                    'provider_id', 'restore_id', 'scheduled_operation_id',
                    'started_at', 'status')
        self.assertEqual(expected, columns)

        # Each datum mirrors the matching field of the fake payload.
        for position, field in enumerate(expected):
            self.assertEqual(self._oplog_info[field], data[position])
|
@ -1,183 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from karborclient.osc.v1 import plans as osc_plans
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import plans
|
||||
|
||||
|
||||
# Canned plan payload shaped like a karbor API response; tests deep-copy
# it so individual cases stay independent.
PLAN_INFO = {
    "status": "suspended",
    "provider_id": "cf56bd3e-97a7-4078-b6d5-f36246333fd9",
    "description": "",
    "parameters": {},
    "id": "204c825e-eb2f-4609-95ab-70b3caa43ac8",
    "resources": [{
        'type': 'OS::Cinder::Volume',
        'id': '71bfe64a-e0b9-4a91-9e15-a7fc9ab31b14',
        'name': 'testsinglevolume'}],
    "name": "OS Volume protection plan."
}
|
||||
|
||||
|
||||
class TestPlans(fakes.TestDataProtection):
    """Common fixture exposing the mocked plan manager."""

    def setUp(self):
        super(TestPlans, self).setUp()
        manager = self.app.client_manager.data_protection.plans
        manager.reset_mock()
        self.plans_mock = manager
|
||||
|
||||
|
||||
class TestListPlans(TestPlans):
    """Tests for the ``plan list`` OSC command."""

    def setUp(self):
        super(TestListPlans, self).setUp()
        self.plans_mock.list.return_value = [plans.Plan(
            None, copy.deepcopy(PLAN_INFO))]
        # Command under test.
        self.cmd = osc_plans.ListPlans(self.app, None)

    def test_plans_list(self):
        parsed = self.check_parser(
            self.cmd, ['--status', 'suspended'], [('status', 'suspended')])

        columns, rows = self.cmd.take_action(parsed)

        self.assertEqual(
            ['Id', 'Name', 'Description', 'Provider id', 'Status'],
            columns)

        # One row for the single faked plan, values in column order.
        self.assertEqual(
            [("204c825e-eb2f-4609-95ab-70b3caa43ac8",
              "OS Volume protection plan.",
              "",
              "cf56bd3e-97a7-4078-b6d5-f36246333fd9",
              "suspended")],
            list(rows))
|
||||
|
||||
|
||||
class TestCreatePlan(TestPlans):
    """Tests for the ``plan create`` OSC command."""

    def setUp(self):
        super(TestCreatePlan, self).setUp()
        self.plans_mock.create.return_value = plans.Plan(
            None, copy.deepcopy(PLAN_INFO))
        # Command under test.
        self.cmd = osc_plans.CreatePlan(self.app, None)

    def test_plan_create(self):
        name = 'OS Volume protection plan.'
        provider = 'cf56bd3e-97a7-4078-b6d5-f36246333fd9'
        # Raw id=type=name triple, exactly as a user would type it.
        resources = ("'71bfe64a-e0b9-4a91-9e15-a7fc9ab31b14'="
                     "'OS::Cinder::Volume'='testsinglevolume'")
        parsed = self.check_parser(
            self.cmd,
            [name, provider, resources],
            [('name', name),
             ('provider_id', provider),
             ('resources', resources)])

        self.cmd.take_action(parsed)

        # The triple is split into a resource dict; parameters default to {}.
        self.plans_mock.create.assert_called_once_with(
            name,
            provider,
            [{'id': "'71bfe64a-e0b9-4a91-9e15-a7fc9ab31b14'",
              'type': "'OS::Cinder::Volume'",
              'name': "'testsinglevolume'"}],
            {}, description=None)
|
||||
|
||||
|
||||
class TestUpdatePlan(TestPlans):
    """Tests for the ``plan update`` OSC command."""

    def setUp(self):
        super(TestUpdatePlan, self).setUp()
        # Both the lookup and the update return an independent fake plan.
        self.plans_mock.get.return_value = plans.Plan(
            None, copy.deepcopy(PLAN_INFO))
        self.plans_mock.update.return_value = plans.Plan(
            None, copy.deepcopy(PLAN_INFO))
        # Command under test.
        self.cmd = osc_plans.UpdatePlan(self.app, None)

    def test_plan_update(self):
        plan_id = '204c825e-eb2f-4609-95ab-70b3caa43ac8'
        parsed = self.check_parser(
            self.cmd,
            [plan_id, '--status', 'started'],
            [('plan_id', plan_id), ('status', 'started')])

        self.cmd.take_action(parsed)

        # Only the requested field appears in the update body.
        self.plans_mock.update.assert_called_once_with(
            plan_id, {'status': 'started'})
|
||||
|
||||
|
||||
class TestDeletePlan(TestPlans):
    """Tests for the ``plan delete`` OSC command."""

    def setUp(self):
        super(TestDeletePlan, self).setUp()
        self.plans_mock.get.return_value = plans.Plan(
            None, copy.deepcopy(PLAN_INFO))
        # Command under test.
        self.cmd = osc_plans.DeletePlan(self.app, None)

    def test_plan_delete(self):
        plan_id = '204c825e-eb2f-4609-95ab-70b3caa43ac8'
        # ``plan`` parses as a repeatable positional, hence the list.
        parsed = self.check_parser(
            self.cmd, [plan_id], [('plan', [plan_id])])

        self.cmd.take_action(parsed)

        self.plans_mock.delete.assert_called_once_with(plan_id)
|
||||
|
||||
|
||||
class TestShowPlan(TestPlans):
    """Tests for the ``plan show`` OSC command."""

    def setUp(self):
        super(TestShowPlan, self).setUp()
        # Keep a handle on the payload so assertions can read it back.
        self._plan_info = copy.deepcopy(PLAN_INFO)
        self.plans_mock.get.return_value = plans.Plan(
            None, self._plan_info)

        # Command under test.
        self.cmd = osc_plans.ShowPlan(self.app, None)

    def test_plan_show(self):
        plan_id = '204c825e-eb2f-4609-95ab-70b3caa43ac8'
        parsed = self.check_parser(
            self.cmd, [plan_id], [('plan', plan_id)])

        columns, data = self.cmd.take_action(parsed)

        expected = ('description', 'id', 'name', 'parameters', 'provider_id',
                    'resources', 'status')
        self.assertEqual(expected, columns)

        # Each datum mirrors the matching field of the fake payload.
        for position, field in enumerate(expected):
            self.assertEqual(self._plan_info[field], data[position])
|
@ -1,163 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from karborclient.osc.v1 import protectables as osc_protectables
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import protectables
|
||||
|
||||
|
||||
# Fake ``protectable list`` payload: the protectable types advertised by
# the fake deployment.
PROTECTABLE_LIST_INFO = {
    "protectable_type": [
        "OS::Keystone::Project",
        "OS::Cinder::Volume",
        "OS::Glance::Image",
        "OS::Nova::Server"
    ]
}

# Fake ``protectable show`` payload for a single protectable type.
PROTECTABLE_SHOW_INFO = {
    "name": "OS::Nova::Server",
    "dependent_types": [
        "OS::Cinder::Volume",
        "OS::Glance::Image"
    ]
}

# Fake ``protectable instance list`` payload (a single Cinder volume).
PROTECTABLE_INSTANCE_LIST_INFO = {
    "id": "25336116-f38e-4c22-81ad-e9b7bd71ba51",
    "type": "OS::Cinder::Volume",
    "name": "System volume",
    "extra_info": {
        "availability_zone": "az1"
    }
}

# Fake ``protectable instance show`` payload (a server depending on one
# Glance image).
PROTECTABLE_INSTANCE_SHOW_INFO = {
    "id": "cb4ef2ff-10f5-46c9-bce4-cf7a49c65a01",
    "type": "OS::Nova::Server",
    "name": "My VM",
    "dependent_resources": [{
        "id": "99777fdd-8a5b-45ab-ba2c-52420008103f",
        "type": "OS::Glance::Image",
        "name": "cirros-0.3.4-x86_64-uec"}]
}
|
||||
|
||||
|
||||
class TestProtectables(fakes.TestDataProtection):
    """Common fixture exposing the mocked protectable manager."""

    def setUp(self):
        super(TestProtectables, self).setUp()
        manager = self.app.client_manager.data_protection.protectables
        manager.reset_mock()
        self.protectables_mock = manager
|
||||
|
||||
|
||||
class TestListProtectables(TestProtectables):
    """Tests for the ``protectable list`` OSC command."""

    def setUp(self):
        super(TestListProtectables, self).setUp()
        self.protectables_mock.list.return_value = [protectables.Protectable(
            None, copy.deepcopy(PROTECTABLE_LIST_INFO))]
        # Command under test.
        self.cmd = osc_protectables.ListProtectables(self.app, None)

    def test_protectables_list(self):
        # The command takes no arguments at all.
        parsed = self.check_parser(self.cmd, [], [])

        columns, rows = self.cmd.take_action(parsed)

        self.assertEqual(['Protectable type'], columns)

        # A single row holding the whole type list.
        self.assertEqual(
            [(['OS::Keystone::Project',
               'OS::Cinder::Volume',
               'OS::Glance::Image',
               'OS::Nova::Server'],)],
            list(rows))
|
||||
|
||||
|
||||
class TestShowProtectable(TestProtectables):
    """Tests for the ``protectable show`` OSC command."""

    def setUp(self):
        super(TestShowProtectable, self).setUp()
        self.protectables_mock.get.return_value = protectables.Protectable(
            None, copy.deepcopy(PROTECTABLE_SHOW_INFO))
        # Command under test.
        self.cmd = osc_protectables.ShowProtectable(self.app, None)

    def test_protectable_show(self):
        kind = 'OS::Nova::Server'
        parsed = self.check_parser(
            self.cmd, [kind], [('protectable_type', kind)])

        self.cmd.take_action(parsed)

        self.protectables_mock.get.assert_called_once_with(kind)
|
||||
|
||||
|
||||
class TestListProtectableInstances(TestProtectables):
    """Tests for the ``protectable instance list`` OSC command."""

    def setUp(self):
        super(TestListProtectableInstances, self).setUp()
        self.protectables_mock.list_instances.return_value = [
            protectables.Instances(
                None, copy.deepcopy(PROTECTABLE_INSTANCE_LIST_INFO))]
        # Command under test.
        self.cmd = osc_protectables.ListProtectableInstances(self.app, None)

    def test_protectable_instances_list(self):
        kind = 'OS::Cinder::Volume'
        parsed = self.check_parser(
            self.cmd, [kind], [('protectable_type', kind)])

        self.cmd.take_action(parsed)

        # Pagination and filter options all default to empty.
        self.protectables_mock.list_instances.assert_called_once_with(
            kind, limit=None, marker=None,
            search_opts={'type': None, 'parameters': None},
            sort=None)
|
||||
|
||||
|
||||
class TestShowProtectableInstance(TestProtectables):
    """Tests for the ``protectable instance show`` OSC command."""

    def setUp(self):
        super(TestShowProtectableInstance, self).setUp()
        self.protectables_mock.get_instance.return_value = (
            protectables.Instances(
                None, copy.deepcopy(PROTECTABLE_INSTANCE_SHOW_INFO)))
        # Command under test.
        self.cmd = osc_protectables.ShowProtectableInstance(self.app, None)

    def test_protectable_instance_show(self):
        kind = 'OS::Nova::Server'
        instance = 'cb4ef2ff-10f5-46c9-bce4-cf7a49c65a01'
        parsed = self.check_parser(
            self.cmd,
            [kind, instance],
            [('protectable_type', kind), ('protectable_id', instance)])

        self.cmd.take_action(parsed)

        self.protectables_mock.get_instance.assert_called_once_with(
            kind, instance, search_opts={'parameters': None})
|
@ -1,144 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from karborclient.osc.v1 import providers as osc_providers
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import providers
|
||||
|
||||
|
||||
# Canned provider payload, including the three JSON schemas a provider
# publishes under ``extended_info_schema``: protect options, saved info,
# and restore options.
PROVIDER_INFO = {
    "id": "2220f8b1-975d-4621-a872-fa9afb43cb6c",
    "name": "OS Infra Provider",
    "description": "provider description",
    "extended_info_schema": {
        "options_schema": {
            "OS::Cinder::Volume": {
                "required": [
                    "backup_mode"
                ],
                "type": "object",
                "properties": {
                    "backup_mode": {
                        "default": "auto",
                        "enum": [
                            "full",
                            "incremental",
                            "auto"
                        ],
                        "type": "string",
                        "description": "The backup mode.",
                        "title": "Backup Mode"
                    }
                },
                "title": "Cinder Protection Options"
            }
        },
        "saved_info_schema": {
            "OS::Cinder::Volume": {
                "required": [
                    "name"
                ],
                "type": "object",
                "properties": {
                    "name": {
                        "type": "string",
                        "description": "The name for this backup.",
                        "title": "Name"
                    }
                },
                "title": "Cinder Protection Saved Info"
            }
        },
        "restore_schema": {
            "OS::Cinder::Volume": {
                "type": "object",
                "properties": {
                    "restore_name": {
                        "type": "string",
                        "description": "The name of the restored volume.",
                        "title": "Restore Name"
                    }
                },
                "title": "Cinder Protection Restore"
            }
        }
    }
}
|
||||
|
||||
|
||||
class TestProviders(fakes.TestDataProtection):
    """Common fixture exposing the mocked provider manager."""

    def setUp(self):
        super(TestProviders, self).setUp()
        manager = self.app.client_manager.data_protection.providers
        manager.reset_mock()
        self.providers_mock = manager
|
||||
|
||||
|
||||
class TestListProviders(TestProviders):
    """Tests for the ``provider list`` OSC command."""

    def setUp(self):
        super(TestListProviders, self).setUp()
        self.providers_mock.list.return_value = [providers.Provider(
            None, copy.deepcopy(PROVIDER_INFO))]
        # Command under test.
        self.cmd = osc_providers.ListProviders(self.app, None)

    def test_providers_list(self):
        parsed = self.check_parser(
            self.cmd,
            ['--name', 'OS Infra Provider'],
            [('name', 'OS Infra Provider')])

        columns, rows = self.cmd.take_action(parsed)

        self.assertEqual(['Id', 'Name', 'Description'], columns)

        # One row for the single faked provider, values in column order.
        self.assertEqual(
            [("2220f8b1-975d-4621-a872-fa9afb43cb6c",
              "OS Infra Provider",
              "provider description")],
            list(rows))
|
||||
|
||||
|
||||
class TestShowProvider(TestProviders):
    """Tests for the ``provider show`` OSC command."""

    def setUp(self):
        super(TestShowProvider, self).setUp()
        # Keep a handle on the payload so assertions can read it back.
        self._provider_info = copy.deepcopy(PROVIDER_INFO)
        self.providers_mock.get.return_value = providers.Provider(
            None, self._provider_info)

        # Command under test.
        self.cmd = osc_providers.ShowProvider(self.app, None)

    def test_provider_show(self):
        provider_id = '2220f8b1-975d-4621-a872-fa9afb43cb6c'
        parsed = self.check_parser(
            self.cmd, [provider_id], [('provider', provider_id)])

        columns, data = self.cmd.take_action(parsed)

        expected = ('description', 'extended_info_schema', 'id', 'name')
        self.assertEqual(expected, columns)

        # Each datum mirrors the matching field of the fake payload.
        for position, field in enumerate(expected):
            self.assertEqual(self._provider_info[field], data[position])
|
@ -1,77 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from karborclient.osc.v1 import quota_classes as osc_quota_classes
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import quota_classes
|
||||
|
||||
|
||||
# Canned quota-class payload; note ``plans`` is the string "40" here,
# while the CLI parses the --plans option to the integer 40.
QUOTA_CLASSES_INFO = {
    "id": "default",
    "plans": "40"
}
|
||||
|
||||
|
||||
class TestQuotaClasses(fakes.TestDataProtection):
    """Common fixture exposing the mocked quota-class manager."""

    def setUp(self):
        super(TestQuotaClasses, self).setUp()
        manager = self.app.client_manager.data_protection.quota_classes
        manager.reset_mock()
        self.quotas_mock = manager
|
||||
|
||||
|
||||
class TestUpdateQuotaClasses(TestQuotaClasses):
    """Tests for the ``quota class update`` OSC command."""

    def setUp(self):
        super(TestUpdateQuotaClasses, self).setUp()
        self.quotas_mock.update.return_value = quota_classes.QuotaClass(
            None, copy.deepcopy(QUOTA_CLASSES_INFO))
        # Command under test.
        self.cmd = osc_quota_classes.UpdateQuotaClasses(self.app, None)

    def test_quota_classes_update(self):
        # The parser turns the '--plans 40' string into the int 40.
        parsed = self.check_parser(
            self.cmd,
            ['--plans', '40', 'default'],
            [('plans', 40), ('class_name', 'default')])

        self.cmd.take_action(parsed)

        self.quotas_mock.update.assert_called_once_with(
            'default', {'plans': 40})
|
||||
|
||||
|
||||
class TestShowQuotaClasses(TestQuotaClasses):
    """Tests for the ``quota class show`` OSC command."""

    def setUp(self):
        super(TestShowQuotaClasses, self).setUp()
        # Feed the stored copy to the mock instead of making a second,
        # unused deepcopy (the original stored self._quota_info and then
        # never used it); this matches the other Show* test fixtures.
        self._quota_info = copy.deepcopy(QUOTA_CLASSES_INFO)
        self.quotas_mock.get.return_value = quota_classes.QuotaClass(
            None, self._quota_info)

        # Command under test.
        self.cmd = osc_quota_classes.ShowQuotaClasses(self.app, None)

    def test_quota_classes_show(self):
        parsed_args = self.check_parser(
            self.cmd, ['default'], [('class_name', 'default')])

        columns, data = self.cmd.take_action(parsed_args)

        self.assertEqual(('id', 'plans'), columns)
        self.assertEqual(('default', '40'), data)
|
@ -1,76 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from karborclient.osc.v1 import quotas as osc_quotas
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import quotas
|
||||
|
||||
|
||||
# Canned per-project quota payload; ``id`` is the tenant/project id
# (the same value the update test passes as the ``tenant`` positional).
QUOTA_INFO = {
    "id": "73f74f90a1754bd7ad658afb3272323f",
    "plans": "40"
}
|
||||
|
||||
|
||||
class TestQuotas(fakes.TestDataProtection):
    """Common fixture exposing the mocked quota manager."""

    def setUp(self):
        super(TestQuotas, self).setUp()
        manager = self.app.client_manager.data_protection.quotas
        manager.reset_mock()
        self.quotas_mock = manager
|
||||
|
||||
|
||||
class TestUpdateQuotas(TestQuotas):
    """Tests for the ``quota update`` OSC command."""

    def setUp(self):
        super(TestUpdateQuotas, self).setUp()
        self.quotas_mock.update.return_value = quotas.Quota(
            None, copy.deepcopy(QUOTA_INFO))
        # Command under test.
        self.cmd = osc_quotas.UpdateQuotas(self.app, None)

    def test_quotas_update(self):
        tenant = '73f74f90a1754bd7ad658afb3272323f'
        # The parser turns the '--plans 40' string into the int 40.
        parsed = self.check_parser(
            self.cmd,
            ['--plans', '40', tenant],
            [('plans', 40), ('tenant', tenant)])

        self.cmd.take_action(parsed)

        self.quotas_mock.update.assert_called_once_with(
            tenant, {'plans': 40})
|
||||
|
||||
|
||||
class TestShowQuotas(TestQuotas):
    """Tests for the ``quota show`` OSC command."""

    def setUp(self):
        super(TestShowQuotas, self).setUp()
        # Feed the stored copy to the mock instead of making a second,
        # unused deepcopy (the original stored self._quota_info and then
        # never used it); this matches the other Show* test fixtures.
        self._quota_info = copy.deepcopy(QUOTA_INFO)
        self.quotas_mock.get.return_value = quotas.Quota(
            None, self._quota_info)

        # Command under test.
        self.cmd = osc_quotas.ShowQuotas(self.app, None)

    def test_quota_show(self):
        # No CLI arguments are required for this command.
        parsed_args = self.check_parser(self.cmd, [], [])

        columns, data = self.cmd.take_action(parsed_args)

        self.assertEqual(('id', 'plans'), columns)
        self.assertEqual(('73f74f90a1754bd7ad658afb3272323f', '40'), data)
|
@ -1,139 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from karborclient.osc.v1 import restores as osc_restores
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import restores
|
||||
|
||||
|
||||
# Canned restore payload shaped like a karbor API response; tests
# deep-copy it so individual cases stay independent.
RESTORE_INFO = {
    "id": "22b82aa7-9179-4c71-bba2-caf5c0e68db7",
    "project_id": "e486a2f49695423ca9c47e589b948108",
    "provider_id": "cf56bd3e-97a7-4078-b6d5-f36246333fd9",
    "checkpoint_id": "dcb20606-ad71-40a3-80e4-ef0fafdad0c3",
    "restore_target": "",
    "parameters": {},
    "restore_auth": {},
    "resources_status": {},
    "resources_reason": {},
    "status": "success"
}
|
||||
|
||||
|
||||
class TestRestores(fakes.TestDataProtection):
    """Common fixture exposing the mocked restore manager."""

    def setUp(self):
        super(TestRestores, self).setUp()
        manager = self.app.client_manager.data_protection.restores
        manager.reset_mock()
        self.restores_mock = manager
|
||||
|
||||
|
||||
class TestListRestores(TestRestores):
    """Tests for the ``restore list`` OSC command."""

    def setUp(self):
        super(TestListRestores, self).setUp()
        self.restores_mock.list.return_value = [restores.Restore(
            None, copy.deepcopy(RESTORE_INFO))]
        # Command under test.
        self.cmd = osc_restores.ListRestores(self.app, None)

    def test_restores_list(self):
        parsed = self.check_parser(
            self.cmd, ['--status', 'success'], [('status', 'success')])

        columns, rows = self.cmd.take_action(parsed)

        self.assertEqual(
            ['Id', 'Project id', 'Provider id', 'Checkpoint id',
             'Restore target', 'Parameters', 'Status'],
            columns)

        # Parameters are rendered as a JSON string in the listing.
        self.assertEqual(
            [("22b82aa7-9179-4c71-bba2-caf5c0e68db7",
              "e486a2f49695423ca9c47e589b948108",
              "cf56bd3e-97a7-4078-b6d5-f36246333fd9",
              "dcb20606-ad71-40a3-80e4-ef0fafdad0c3",
              "",
              jsonutils.dumps({}),
              "success")],
            list(rows))
|
||||
|
||||
|
||||
class TestCreateRestore(TestRestores):
|
||||
def setUp(self):
|
||||
super(TestCreateRestore, self).setUp()
|
||||
self.restores_mock.create.return_value = restores.Restore(
|
||||
None, copy.deepcopy(RESTORE_INFO))
|
||||
# Command to test
|
||||
self.cmd = osc_restores.CreateRestore(self.app, None)
|
||||
|
||||
def test_restore_create(self):
|
||||
arglist = ['cf56bd3e-97a7-4078-b6d5-f36246333fd9',
|
||||
'dcb20606-ad71-40a3-80e4-ef0fafdad0c3']
|
||||
verifylist = [('provider_id', 'cf56bd3e-97a7-4078-b6d5-f36246333fd9'),
|
||||
('checkpoint_id',
|
||||
'dcb20606-ad71-40a3-80e4-ef0fafdad0c3')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
# Check that correct arguments were passed
|
||||
self.restores_mock.create.assert_called_once_with(
|
||||
'cf56bd3e-97a7-4078-b6d5-f36246333fd9',
|
||||
'dcb20606-ad71-40a3-80e4-ef0fafdad0c3',
|
||||
None, {}, None)
|
||||
|
||||
|
||||
class TestShowRestore(TestRestores):
|
||||
def setUp(self):
|
||||
super(TestShowRestore, self).setUp()
|
||||
self._restore_info = copy.deepcopy(RESTORE_INFO)
|
||||
self.restores_mock.get.return_value = restores.Restore(
|
||||
None, self._restore_info)
|
||||
|
||||
# Command to test
|
||||
self.cmd = osc_restores.ShowRestore(self.app, None)
|
||||
|
||||
def test_restore_show(self):
|
||||
arglist = ['22b82aa7-9179-4c71-bba2-caf5c0e68db7']
|
||||
verifylist = [('restore', '22b82aa7-9179-4c71-bba2-caf5c0e68db7')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
# Check that columns are correct
|
||||
expected_columns = (
|
||||
'checkpoint_id', 'id', 'parameters', 'project_id',
|
||||
'provider_id', 'resources_reason', 'resources_status',
|
||||
'restore_auth', 'restore_target', 'status')
|
||||
self.assertEqual(expected_columns, columns)
|
||||
|
||||
# Check that data is correct
|
||||
self.assertEqual(self._restore_info['checkpoint_id'], data[0])
|
||||
self.assertEqual(self._restore_info['id'], data[1])
|
||||
self.assertEqual(self._restore_info['parameters'], data[2])
|
||||
self.assertEqual(self._restore_info['project_id'], data[3])
|
||||
self.assertEqual(self._restore_info['provider_id'], data[4])
|
||||
self.assertEqual(self._restore_info['resources_reason'], data[5])
|
||||
self.assertEqual(self._restore_info['resources_status'], data[6])
|
||||
self.assertEqual(self._restore_info['restore_auth'], data[7])
|
||||
self.assertEqual(self._restore_info['restore_target'], data[8])
|
||||
self.assertEqual(self._restore_info['status'], data[9])
|
@ -1,173 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
import json
|
||||
|
||||
from karborclient.osc.v1 import scheduled_operations as osc_so
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import scheduled_operations
|
||||
|
||||
|
||||
SCHEDULEDOPERATION_INFO = {
|
||||
"id": "1a2c0c3d-f402-4cd8-b5db-82e85cb51fad",
|
||||
"name": "My scheduled operation",
|
||||
"description": "It will run everyday",
|
||||
"operation_type": "protect",
|
||||
"trigger_id": "23902b02-5666-4ee6-8dfe-962ac09c3995",
|
||||
"operation_definition": {
|
||||
"provider_id": "2a9ce1f3-cc1a-4516-9435-0ebb13caa399",
|
||||
"plan_id": "2a9ce1f3-cc1a-4516-9435-0ebb13caa398"
|
||||
},
|
||||
"enabled": 1
|
||||
}
|
||||
|
||||
|
||||
class TestScheduledOperations(fakes.TestDataProtection):
|
||||
def setUp(self):
|
||||
super(TestScheduledOperations, self).setUp()
|
||||
self.so_mock = self.app.client_manager.data_protection.\
|
||||
scheduled_operations
|
||||
self.so_mock.reset_mock()
|
||||
|
||||
|
||||
class TestListScheduledOperations(TestScheduledOperations):
|
||||
def setUp(self):
|
||||
super(TestListScheduledOperations, self).setUp()
|
||||
self.so_mock.list.return_value = [
|
||||
scheduled_operations.ScheduledOperation(
|
||||
None, copy.deepcopy(SCHEDULEDOPERATION_INFO))
|
||||
]
|
||||
|
||||
# Command to test
|
||||
self.cmd = osc_so.ListScheduledOperations(self.app, None)
|
||||
|
||||
def test_scheduled_operations_list(self):
|
||||
arglist = ['--name', 'My scheduled operation']
|
||||
verifylist = [('name', 'My scheduled operation')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
# Check that columns are correct
|
||||
expected_columns = (
|
||||
['Id', 'Name', 'Operation Type', 'Trigger Id',
|
||||
'Operation Definition'])
|
||||
self.assertEqual(expected_columns, columns)
|
||||
|
||||
operation_definition = {
|
||||
"provider_id": "2a9ce1f3-cc1a-4516-9435-0ebb13caa399",
|
||||
"plan_id": "2a9ce1f3-cc1a-4516-9435-0ebb13caa398"
|
||||
}
|
||||
|
||||
# Check that data is correct
|
||||
expected_data = [("1a2c0c3d-f402-4cd8-b5db-82e85cb51fad",
|
||||
"My scheduled operation",
|
||||
"protect",
|
||||
"23902b02-5666-4ee6-8dfe-962ac09c3995",
|
||||
json.dumps(operation_definition,
|
||||
indent=2, sort_keys=True)
|
||||
)]
|
||||
self.assertEqual(expected_data, data)
|
||||
|
||||
|
||||
class TestCreateScheduledOperation(TestScheduledOperations):
|
||||
def setUp(self):
|
||||
super(TestCreateScheduledOperation, self).setUp()
|
||||
self.so_mock.create.return_value = scheduled_operations.\
|
||||
ScheduledOperation(None, copy.deepcopy(SCHEDULEDOPERATION_INFO))
|
||||
# Command to test
|
||||
self.cmd = osc_so.CreateScheduledOperation(self.app, None)
|
||||
|
||||
def test_scheduled_operation_create(self):
|
||||
arglist = ['My scheduled operation',
|
||||
'protect',
|
||||
"23902b02-5666-4ee6-8dfe-962ac09c3995",
|
||||
"'provider_id=2a9ce1f3-cc1a-4516-9435-0ebb13caa399,"
|
||||
"plan_id=2a9ce1f3-cc1a-4516-9435-0ebb13caa398'"]
|
||||
verifylist = [('name', 'My scheduled operation'),
|
||||
('operation_type', 'protect'),
|
||||
('trigger_id', "23902b02-5666-4ee6-8dfe-962ac09c3995"),
|
||||
('operation_definition',
|
||||
"'provider_id=2a9ce1f3-cc1a-4516-9435-0ebb13caa399,"
|
||||
"plan_id=2a9ce1f3-cc1a-4516-9435-0ebb13caa398'")]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
# Check that correct arguments were passed
|
||||
self.so_mock.create.assert_called_once_with(
|
||||
'My scheduled operation',
|
||||
'protect',
|
||||
'23902b02-5666-4ee6-8dfe-962ac09c3995',
|
||||
"'provider_id=2a9ce1f3-cc1a-4516-9435-0ebb13caa399,"
|
||||
"plan_id=2a9ce1f3-cc1a-4516-9435-0ebb13caa398'")
|
||||
|
||||
|
||||
class TestDeleteScheduledOperation(TestScheduledOperations):
|
||||
def setUp(self):
|
||||
super(TestDeleteScheduledOperation, self).setUp()
|
||||
self.so_mock.get.return_value = scheduled_operations.\
|
||||
ScheduledOperation(None, copy.deepcopy(SCHEDULEDOPERATION_INFO))
|
||||
# Command to test
|
||||
self.cmd = osc_so.DeleteScheduledOperation(self.app, None)
|
||||
|
||||
def test_scheduled_operation_delete(self):
|
||||
arglist = ['1a2c0c3d-f402-4cd8-b5db-82e85cb51fad']
|
||||
verifylist = [('scheduledoperation',
|
||||
['1a2c0c3d-f402-4cd8-b5db-82e85cb51fad'])]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
# Check that correct arguments were passed
|
||||
self.so_mock.delete.assert_called_once_with(
|
||||
'1a2c0c3d-f402-4cd8-b5db-82e85cb51fad')
|
||||
|
||||
|
||||
class TestShowScheduledOperation(TestScheduledOperations):
|
||||
def setUp(self):
|
||||
super(TestShowScheduledOperation, self).setUp()
|
||||
self._schedop_info = copy.deepcopy(SCHEDULEDOPERATION_INFO)
|
||||
self.so_mock.get.return_value = scheduled_operations.\
|
||||
ScheduledOperation(None, self._schedop_info)
|
||||
|
||||
# Command to test
|
||||
self.cmd = osc_so.ShowScheduledOperation(self.app, None)
|
||||
|
||||
def test_scheduled_operation_show(self):
|
||||
arglist = ['1a2c0c3d-f402-4cd8-b5db-82e85cb51fad']
|
||||
verifylist = [('scheduledoperation',
|
||||
'1a2c0c3d-f402-4cd8-b5db-82e85cb51fad')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
# Check that columns are correct
|
||||
expected_columns = (
|
||||
'description', 'enabled', 'id', 'name', 'operation_definition',
|
||||
'operation_type', 'trigger_id')
|
||||
self.assertEqual(expected_columns, columns)
|
||||
|
||||
# Check that data is correct
|
||||
self.assertEqual(self._schedop_info['description'], data[0])
|
||||
self.assertEqual(self._schedop_info['enabled'], data[1])
|
||||
self.assertEqual(self._schedop_info['id'], data[2])
|
||||
self.assertEqual(self._schedop_info['name'], data[3])
|
||||
self.assertEqual(self._schedop_info['operation_definition'], data[4])
|
||||
self.assertEqual(self._schedop_info['operation_type'], data[5])
|
||||
self.assertEqual(self._schedop_info['trigger_id'], data[6])
|
@ -1,101 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import copy
|
||||
|
||||
from karborclient.osc.v1 import services as osc_services
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import services
|
||||
|
||||
SERVICE_INFO = {
|
||||
"status": "enabled",
|
||||
"binary": "karbor-operationengine",
|
||||
"state": "up",
|
||||
"updated_at": "2017-10-25T07:06:58.000000",
|
||||
"host": "fake_host",
|
||||
"disabled_reason": None,
|
||||
"id": 1
|
||||
}
|
||||
|
||||
|
||||
class TestServices(fakes.TestDataProtection):
|
||||
def setUp(self):
|
||||
super(TestServices, self).setUp()
|
||||
self.services_mock = self.app.client_manager.data_protection.services
|
||||
self.services_mock.reset_mock()
|
||||
|
||||
|
||||
class TestListServices(TestServices):
|
||||
def setUp(self):
|
||||
super(TestListServices, self).setUp()
|
||||
self.services_mock.list.return_value = [
|
||||
services.Service(None, copy.deepcopy(SERVICE_INFO))]
|
||||
self.cmd = osc_services.ListServices(self.app, None)
|
||||
|
||||
def test_services_list(self):
|
||||
arg_list = ['--host', 'fake_host']
|
||||
verify_list = [('host', 'fake_host')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
expected_columns = (["Id", "Binary", "Host", "Status", "State",
|
||||
"Updated_at", "Disabled Reason"])
|
||||
self.assertEqual(expected_columns, columns)
|
||||
expected_data = [(1,
|
||||
"karbor-operationengine",
|
||||
"fake_host",
|
||||
"enabled",
|
||||
"up",
|
||||
"2017-10-25T07:06:58.000000",
|
||||
None
|
||||
)]
|
||||
self.assertEqual(expected_data, list(data))
|
||||
|
||||
|
||||
class TestEnableService(TestServices):
|
||||
def setUp(self):
|
||||
super(TestEnableService, self).setUp()
|
||||
self.services_mock.enable.return_value = services.Service(
|
||||
None, copy.deepcopy(SERVICE_INFO))
|
||||
self.cmd = osc_services.EnableService(self.app, None)
|
||||
|
||||
def test_enable_service(self):
|
||||
arg_list = ['1']
|
||||
verify_list = [('service_id', '1')]
|
||||
parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.services_mock.enable.assert_called_once_with('1')
|
||||
|
||||
|
||||
class TestDisableService(TestServices):
|
||||
def setUp(self):
|
||||
super(TestDisableService, self).setUp()
|
||||
self.services_mock.disable.return_value = services.Service(
|
||||
None, copy.deepcopy(SERVICE_INFO))
|
||||
self.services_mock.disable_log_reason.return_value = services.Service(
|
||||
None, copy.deepcopy(SERVICE_INFO))
|
||||
self.cmd = osc_services.DisableService(self.app, None)
|
||||
|
||||
def test_disable_service(self):
|
||||
arg_list = ['1']
|
||||
verify_list = [('service_id', '1')]
|
||||
parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.services_mock.disable.assert_called_once_with('1')
|
||||
|
||||
def test_disable_service_with_reason(self):
|
||||
arg_list = ['1', '--reason', 'fake_reason']
|
||||
verify_list = [('service_id', '1'), ('reason', 'fake_reason')]
|
||||
parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.services_mock.disable_log_reason.assert_called_once_with(
|
||||
'1', 'fake_reason')
|
@ -1,189 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from karborclient.osc.v1 import triggers as osc_triggers
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import triggers
|
||||
|
||||
|
||||
TRIGGER_INFO = {
|
||||
"id": "2a9ce1f3-cc1a-4516-9435-0ebb13caa398",
|
||||
"name": "My backup trigger",
|
||||
"type": "time",
|
||||
"properties": {
|
||||
"format": "calendar",
|
||||
"pattern": "BEGIN:VEVENT\\nRRULE:FREQ=HOURLY;INTERVAL=1;\\nEND:VEVENT",
|
||||
"start_time": "2015-12-17T08:30:00",
|
||||
"end_time": "2016-03-17T08:30:00",
|
||||
"window": "3600"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class TestTriggers(fakes.TestDataProtection):
|
||||
def setUp(self):
|
||||
super(TestTriggers, self).setUp()
|
||||
self.triggers_mock = self.app.client_manager.data_protection.triggers
|
||||
self.triggers_mock.reset_mock()
|
||||
|
||||
|
||||
class TestListTriggers(TestTriggers):
|
||||
def setUp(self):
|
||||
super(TestListTriggers, self).setUp()
|
||||
self.triggers_mock.list.return_value = [triggers.Trigger(
|
||||
None, copy.deepcopy(TRIGGER_INFO))]
|
||||
|
||||
# Command to test
|
||||
self.cmd = osc_triggers.ListTriggers(self.app, None)
|
||||
|
||||
def test_triggers_list(self):
|
||||
arglist = ['--name', 'My backup trigger']
|
||||
verifylist = [('name', 'My backup trigger')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
# Check that columns are correct
|
||||
expected_columns = (
|
||||
['Id', 'Name', 'Type', 'Properties'])
|
||||
self.assertEqual(expected_columns, columns)
|
||||
|
||||
# Check that data is correct
|
||||
expected_data = [("2a9ce1f3-cc1a-4516-9435-0ebb13caa398",
|
||||
"My backup trigger",
|
||||
"time",
|
||||
{"format": "calendar",
|
||||
"pattern": "BEGIN:VEVENT\\nRRULE:FREQ=HOURLY;INTERVAL=1;\\nEND:VEVENT", # noqa
|
||||
"start_time": "2015-12-17T08:30:00",
|
||||
"end_time": "2016-03-17T08:30:00",
|
||||
"window": "3600"})]
|
||||
self.assertEqual(expected_data, list(data))
|
||||
|
||||
|
||||
class TestCreateTrigger(TestTriggers):
|
||||
def setUp(self):
|
||||
super(TestCreateTrigger, self).setUp()
|
||||
self.triggers_mock.create.return_value = triggers.Trigger(
|
||||
None, copy.deepcopy(TRIGGER_INFO))
|
||||
# Command to test
|
||||
self.cmd = osc_triggers.CreateTrigger(self.app, None)
|
||||
|
||||
def test_trigger_create(self):
|
||||
arglist = ['My backup trigger',
|
||||
'time',
|
||||
"'format'='calendar',"
|
||||
"'pattern'='BEGIN:VEVENT\\nRRULE:FREQ=HOURLY;INTERVAL=1;\\nEND:VEVENT'," # noqa
|
||||
"'start_time'='2015-12-17T08:30:00',"
|
||||
"'end_time'='2016-03-17T08:30:00',"
|
||||
"'window'='3600'"]
|
||||
verifylist = [('name', 'My backup trigger'),
|
||||
('type', 'time'),
|
||||
('properties', "'format'='calendar',"
|
||||
"'pattern'='BEGIN:VEVENT\\nRRULE:FREQ=HOURLY;INTERVAL=1;\\nEND:VEVENT'," # noqa
|
||||
"'start_time'='2015-12-17T08:30:00',"
|
||||
"'end_time'='2016-03-17T08:30:00',"
|
||||
"'window'='3600'")]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
# Check that correct arguments were passed
|
||||
self.triggers_mock.create.assert_called_once_with(
|
||||
'My backup trigger',
|
||||
'time',
|
||||
"'format'='calendar',"
|
||||
"'pattern'='BEGIN:VEVENT\\nRRULE:FREQ=HOURLY;INTERVAL=1;\\nEND:VEVENT'," # noqa
|
||||
"'start_time'='2015-12-17T08:30:00',"
|
||||
"'end_time'='2016-03-17T08:30:00',"
|
||||
"'window'='3600'")
|
||||
|
||||
|
||||
class TestUpdateTrigger(TestTriggers):
|
||||
def setUp(self):
|
||||
super(TestUpdateTrigger, self).setUp()
|
||||
self.triggers_mock.get.return_value = triggers.Trigger(
|
||||
None, copy.deepcopy(TRIGGER_INFO))
|
||||
self.triggers_mock.update.return_value = triggers.Trigger(
|
||||
None, copy.deepcopy(TRIGGER_INFO))
|
||||
# Command to test
|
||||
self.cmd = osc_triggers.UpdateTrigger(self.app, None)
|
||||
|
||||
def test_trigger_update(self):
|
||||
arglist = ['2a9ce1f3-cc1a-4516-9435-0ebb13caa398',
|
||||
'--name', 'My backup trigger']
|
||||
verifylist = [('trigger_id', '2a9ce1f3-cc1a-4516-9435-0ebb13caa398'),
|
||||
('name', 'My backup trigger')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
# Check that correct arguments were passed
|
||||
self.triggers_mock.update.assert_called_once_with(
|
||||
'2a9ce1f3-cc1a-4516-9435-0ebb13caa398',
|
||||
{'name': 'My backup trigger'})
|
||||
|
||||
|
||||
class TestDeleteTrigger(TestTriggers):
|
||||
def setUp(self):
|
||||
super(TestDeleteTrigger, self).setUp()
|
||||
self.triggers_mock.get.return_value = triggers.Trigger(
|
||||
None, copy.deepcopy(TRIGGER_INFO))
|
||||
# Command to test
|
||||
self.cmd = osc_triggers.DeleteTrigger(self.app, None)
|
||||
|
||||
def test_trigger_delete(self):
|
||||
arglist = ['2a9ce1f3-cc1a-4516-9435-0ebb13caa398']
|
||||
verifylist = [('trigger', ['2a9ce1f3-cc1a-4516-9435-0ebb13caa398'])]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
# Check that correct arguments were passed
|
||||
self.triggers_mock.delete.assert_called_once_with(
|
||||
'2a9ce1f3-cc1a-4516-9435-0ebb13caa398')
|
||||
|
||||
|
||||
class TestShowTrigger(TestTriggers):
|
||||
def setUp(self):
|
||||
super(TestShowTrigger, self).setUp()
|
||||
self._trigger_info = copy.deepcopy(TRIGGER_INFO)
|
||||
self.triggers_mock.get.return_value = triggers.Trigger(
|
||||
None, self._trigger_info)
|
||||
|
||||
# Command to test
|
||||
self.cmd = osc_triggers.ShowTrigger(self.app, None)
|
||||
|
||||
def test_trigger_show(self):
|
||||
arglist = ['2a9ce1f3-cc1a-4516-9435-0ebb13caa398']
|
||||
verifylist = [('trigger', '2a9ce1f3-cc1a-4516-9435-0ebb13caa398')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
# Check that columns are correct
|
||||
expected_columns = (
|
||||
'id', 'name', 'properties', 'type')
|
||||
self.assertEqual(expected_columns, columns)
|
||||
|
||||
# Check that data is correct
|
||||
self.assertEqual(self._trigger_info['id'], data[0])
|
||||
self.assertEqual(self._trigger_info['name'], data[1])
|
||||
self.assertEqual(self._trigger_info['properties'], data[2])
|
||||
self.assertEqual(self._trigger_info['type'], data[3])
|
@ -1,130 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from karborclient.osc.v1 import verifications as osc_verifications
|
||||
from karborclient.tests.unit.osc.v1 import fakes
|
||||
from karborclient.v1 import verifications
|
||||
|
||||
|
||||
VERIFICATION_INFO = {
|
||||
"id": "22b82aa7-9179-4c71-bba2-caf5c0e68db7",
|
||||
"project_id": "e486a2f49695423ca9c47e589b948108",
|
||||
"provider_id": "cf56bd3e-97a7-4078-b6d5-f36246333fd9",
|
||||
"checkpoint_id": "dcb20606-ad71-40a3-80e4-ef0fafdad0c3",
|
||||
"parameters": {},
|
||||
"resources_status": {},
|
||||
"resources_reason": {},
|
||||
"status": "success"
|
||||
}
|
||||
|
||||
|
||||
class TestVerifications(fakes.TestDataProtection):
|
||||
def setUp(self):
|
||||
super(TestVerifications, self).setUp()
|
||||
self.verifications_mock = (
|
||||
self.app.client_manager.data_protection.verifications)
|
||||
self.verifications_mock.reset_mock()
|
||||
|
||||
|
||||
class TestListVerifications(TestVerifications):
|
||||
def setUp(self):
|
||||
super(TestListVerifications, self).setUp()
|
||||
self.verifications_mock.list.return_value = (
|
||||
[verifications.Verification(
|
||||
None, copy.deepcopy(VERIFICATION_INFO))])
|
||||
|
||||
# Command to test
|
||||
self.cmd = osc_verifications.ListVerifications(self.app, None)
|
||||
|
||||
def test_verifications_list(self):
|
||||
arglist = ['--status', 'success']
|
||||
verifylist = [('status', 'success')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
expected_columns = (
|
||||
['Id', 'Project id', 'Provider id', 'Checkpoint id',
|
||||
'Parameters', 'Status'])
|
||||
self.assertEqual(expected_columns, columns)
|
||||
|
||||
# Check that data is correct
|
||||
expected_data = [("22b82aa7-9179-4c71-bba2-caf5c0e68db7",
|
||||
"e486a2f49695423ca9c47e589b948108",
|
||||
"cf56bd3e-97a7-4078-b6d5-f36246333fd9",
|
||||
"dcb20606-ad71-40a3-80e4-ef0fafdad0c3",
|
||||
jsonutils.dumps({}),
|
||||
"success")]
|
||||
self.assertEqual(expected_data, list(data))
|
||||
|
||||
|
||||
class TestCreateVerification(TestVerifications):
|
||||
def setUp(self):
|
||||
super(TestCreateVerification, self).setUp()
|
||||
self.verifications_mock.create.return_value = (
|
||||
verifications.Verification(
|
||||
None, copy.deepcopy(VERIFICATION_INFO)))
|
||||
self.cmd = osc_verifications.CreateVerification(self.app, None)
|
||||
|
||||
def test_verification_create(self):
|
||||
arglist = ['cf56bd3e-97a7-4078-b6d5-f36246333fd9',
|
||||
'dcb20606-ad71-40a3-80e4-ef0fafdad0c3']
|
||||
verifylist = [('provider_id', 'cf56bd3e-97a7-4078-b6d5-f36246333fd9'),
|
||||
('checkpoint_id',
|
||||
'dcb20606-ad71-40a3-80e4-ef0fafdad0c3')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.verifications_mock.create.assert_called_once_with(
|
||||
'cf56bd3e-97a7-4078-b6d5-f36246333fd9',
|
||||
'dcb20606-ad71-40a3-80e4-ef0fafdad0c3', {})
|
||||
|
||||
|
||||
class TestShowVerification(TestVerifications):
|
||||
def setUp(self):
|
||||
super(TestShowVerification, self).setUp()
|
||||
self._verification_info = copy.deepcopy(VERIFICATION_INFO)
|
||||
self.verifications_mock.get.return_value = (
|
||||
verifications.Verification(None, self._verification_info))
|
||||
|
||||
self.cmd = osc_verifications.ShowVerification(self.app, None)
|
||||
|
||||
def test_verification_show(self):
|
||||
arglist = ['22b82aa7-9179-4c71-bba2-caf5c0e68db7']
|
||||
verifylist = [('verification', '22b82aa7-9179-4c71-bba2-caf5c0e68db7')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
expected_columns = (
|
||||
'checkpoint_id', 'id', 'parameters', 'project_id',
|
||||
'provider_id', 'resources_reason', 'resources_status',
|
||||
'status')
|
||||
self.assertEqual(expected_columns, columns)
|
||||
|
||||
self.assertEqual(self._verification_info['checkpoint_id'], data[0])
|
||||
self.assertEqual(self._verification_info['id'], data[1])
|
||||
self.assertEqual(self._verification_info['parameters'], data[2])
|
||||
self.assertEqual(self._verification_info['project_id'], data[3])
|
||||
self.assertEqual(self._verification_info['provider_id'], data[4])
|
||||
self.assertEqual(self._verification_info['resources_reason'], data[5])
|
||||
self.assertEqual(self._verification_info['resources_status'], data[6])
|
||||
self.assertEqual(self._verification_info['status'], data[7])
|
@ -1,451 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import socket
|
||||
|
||||
import mock
|
||||
import testtools
|
||||
|
||||
from karborclient.common.apiclient import exceptions as exc
|
||||
from karborclient.common import http
|
||||
from karborclient.tests.unit import fakes
|
||||
|
||||
|
||||
@mock.patch('karborclient.common.http.requests.request')
|
||||
class HttpClientTest(testtools.TestCase):
|
||||
|
||||
# Patch os.environ to avoid required auth info.
|
||||
def setUp(self):
|
||||
super(HttpClientTest, self).setUp()
|
||||
|
||||
def test_http_raw_request(self, mock_request):
|
||||
headers = {'User-Agent': 'python-karborclient'}
|
||||
mock_request.return_value = \
|
||||
fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{},
|
||||
'')
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
resp = client.raw_request('GET', '')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
self.assertEqual('', ''.join([x for x in resp.content]))
|
||||
mock_request.assert_called_with('GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers=headers)
|
||||
|
||||
def test_token_or_credentials(self, mock_request):
|
||||
# Record a 200
|
||||
fake200 = fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{},
|
||||
'')
|
||||
|
||||
mock_request.side_effect = [fake200, fake200, fake200]
|
||||
|
||||
# Replay, create client, assert
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
resp = client.raw_request('GET', '')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
|
||||
client.username = 'user'
|
||||
client.password = 'pass'
|
||||
resp = client.raw_request('GET', '')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
|
||||
client.auth_token = 'abcd1234'
|
||||
resp = client.raw_request('GET', '')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
|
||||
# no token or credentials
|
||||
mock_request.assert_has_calls([
|
||||
mock.call('GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers={'User-Agent': 'python-karborclient'}),
|
||||
mock.call('GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers={'User-Agent': 'python-karborclient',
|
||||
'X-Auth-Key': 'pass',
|
||||
'X-Auth-User': 'user'}),
|
||||
mock.call('GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers={'User-Agent': 'python-karborclient',
|
||||
'X-Auth-Token': 'abcd1234'})
|
||||
])
|
||||
|
||||
def test_region_name(self, mock_request):
|
||||
# Record a 200
|
||||
fake200 = fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{},
|
||||
'')
|
||||
|
||||
mock_request.return_value = fake200
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
client.region_name = 'RegionOne'
|
||||
resp = client.raw_request('GET', '')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
|
||||
mock_request.assert_called_once_with(
|
||||
'GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers={'X-Region-Name': 'RegionOne',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
|
||||
def test_http_json_request(self, mock_request):
|
||||
# Record a 200
|
||||
mock_request.return_value = \
|
||||
fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{'content-type': 'application/json'},
|
||||
'{}')
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
resp, body = client.json_request('GET', '')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
self.assertEqual({}, body)
|
||||
|
||||
mock_request.assert_called_once_with(
|
||||
'GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
|
||||
def test_http_json_request_argument_passed_to_requests(self, mock_request):
|
||||
"""Check that we have sent the proper arguments to requests."""
|
||||
# Record a 200
|
||||
mock_request.return_value = \
|
||||
fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{'content-type': 'application/json'},
|
||||
'{}')
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
client.verify_cert = True
|
||||
client.cert_file = 'RANDOM_CERT_FILE'
|
||||
client.key_file = 'RANDOM_KEY_FILE'
|
||||
client.auth_url = 'http://AUTH_URL'
|
||||
resp, body = client.json_request('GET', '', data='text')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
self.assertEqual({}, body)
|
||||
|
||||
mock_request.assert_called_once_with(
|
||||
'GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
cert=('RANDOM_CERT_FILE', 'RANDOM_KEY_FILE'),
|
||||
verify=True,
|
||||
data='"text"',
|
||||
headers={'Content-Type': 'application/json',
|
||||
'X-Auth-Url': 'http://AUTH_URL',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
|
||||
def test_http_json_request_w_req_body(self, mock_request):
|
||||
# Record a 200
|
||||
mock_request.return_value = \
|
||||
fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{'content-type': 'application/json'},
|
||||
'{}')
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
resp, body = client.json_request('GET', '', data='test-body')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
self.assertEqual({}, body)
|
||||
mock_request.assert_called_once_with(
|
||||
'GET', 'http://example.com:8082',
|
||||
data='"test-body"',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
|
||||
def test_http_json_request_non_json_resp_cont_type(self, mock_request):
|
||||
# Record a 200
|
||||
mock_request.return_value = \
|
||||
fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{'content-type': 'not/json'},
|
||||
'{}')
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
resp, body = client.json_request('GET', '', data='test-data')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
self.assertIsNone(body)
|
||||
mock_request.assert_called_once_with(
|
||||
'GET', 'http://example.com:8082', data='"test-data"',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
|
||||
def test_http_json_request_invalid_json(self, mock_request):
|
||||
# Record a 200
|
||||
mock_request.return_value = \
|
||||
fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{'content-type': 'application/json'},
|
||||
'invalid-json')
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
resp, body = client.json_request('GET', '')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
self.assertEqual('invalid-json', body)
|
||||
mock_request.assert_called_once_with(
|
||||
'GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
|
||||
def test_http_manual_redirect_delete(self, mock_request):
|
||||
mock_request.side_effect = [
|
||||
fakes.FakeHTTPResponse(
|
||||
302, 'Found',
|
||||
{'location': 'http://example.com:8082/foo/bar'},
|
||||
''),
|
||||
fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{'content-type': 'application/json'},
|
||||
'{}')]
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082/foo')
|
||||
resp, body = client.json_request('DELETE', '')
|
||||
|
||||
self.assertEqual(200, resp.status_code)
|
||||
mock_request.assert_has_calls([
|
||||
mock.call('DELETE', 'http://example.com:8082/foo',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'}),
|
||||
mock.call('DELETE', 'http://example.com:8082/foo/bar',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
])
|
||||
|
||||
def test_http_manual_redirect_post(self, mock_request):
|
||||
mock_request.side_effect = [
|
||||
fakes.FakeHTTPResponse(
|
||||
302, 'Found',
|
||||
{'location': 'http://example.com:8082/foo/bar'},
|
||||
''),
|
||||
fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{'content-type': 'application/json'},
|
||||
'{}')]
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082/foo')
|
||||
resp, body = client.json_request('POST', '')
|
||||
|
||||
self.assertEqual(200, resp.status_code)
|
||||
mock_request.assert_has_calls([
|
||||
mock.call('POST', 'http://example.com:8082/foo',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'}),
|
||||
mock.call('POST', 'http://example.com:8082/foo/bar',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
])
|
||||
|
||||
def test_http_manual_redirect_put(self, mock_request):
|
||||
mock_request.side_effect = [
|
||||
fakes.FakeHTTPResponse(
|
||||
302, 'Found',
|
||||
{'location': 'http://example.com:8082/foo/bar'},
|
||||
''),
|
||||
fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{'content-type': 'application/json'},
|
||||
'{}')]
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082/foo')
|
||||
resp, body = client.json_request('PUT', '')
|
||||
|
||||
self.assertEqual(200, resp.status_code)
|
||||
mock_request.assert_has_calls([
|
||||
mock.call('PUT', 'http://example.com:8082/foo',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'}),
|
||||
mock.call('PUT', 'http://example.com:8082/foo/bar',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
])
|
||||
|
||||
def test_http_manual_redirect_prohibited(self, mock_request):
|
||||
mock_request.return_value = \
|
||||
fakes.FakeHTTPResponse(
|
||||
302, 'Found',
|
||||
{'location': 'http://example.com:8082/'},
|
||||
'')
|
||||
client = http.HTTPClient('http://example.com:8082/foo')
|
||||
self.assertRaises(exc.EndpointException,
|
||||
client.json_request, 'DELETE', '')
|
||||
mock_request.assert_called_once_with(
|
||||
'DELETE', 'http://example.com:8082/foo',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
|
||||
def test_http_manual_redirect_error_without_location(self, mock_request):
|
||||
mock_request.return_value = \
|
||||
fakes.FakeHTTPResponse(
|
||||
302, 'Found',
|
||||
{},
|
||||
'')
|
||||
client = http.HTTPClient('http://example.com:8082/foo')
|
||||
self.assertRaises(exc.EndpointException,
|
||||
client.json_request, 'DELETE', '')
|
||||
mock_request.assert_called_once_with(
|
||||
'DELETE', 'http://example.com:8082/foo',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
|
||||
def test_http_json_request_redirect(self, mock_request):
|
||||
# Record the 302
|
||||
mock_request.side_effect = [
|
||||
fakes.FakeHTTPResponse(
|
||||
302, 'Found',
|
||||
{'location': 'http://example.com:8082'},
|
||||
''),
|
||||
fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{'content-type': 'application/json'},
|
||||
'{}')]
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
resp, body = client.json_request('GET', '')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
self.assertEqual({}, body)
|
||||
|
||||
mock_request.assert_has_calls([
|
||||
mock.call('GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'}),
|
||||
mock.call('GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
])
|
||||
|
||||
def test_http_404_json_request(self, mock_request):
|
||||
mock_request.return_value = \
|
||||
fakes.FakeHTTPResponse(
|
||||
404, 'Not Found', {'content-type': 'application/json'},
|
||||
'{}')
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
e = self.assertRaises(exc.HTTPClientError,
|
||||
client.json_request, 'GET', '')
|
||||
# Assert that the raised exception can be converted to string
|
||||
self.assertIsNotNone(str(e))
|
||||
# Record a 404
|
||||
mock_request.assert_called_once_with(
|
||||
'GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
|
||||
def test_http_300_json_request(self, mock_request):
|
||||
mock_request.return_value = \
|
||||
fakes.FakeHTTPResponse(
|
||||
300, 'OK', {'content-type': 'application/json'},
|
||||
'{}')
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
e = self.assertRaises(
|
||||
exc.MultipleChoices, client.json_request, 'GET', '')
|
||||
# Assert that the raised exception can be converted to string
|
||||
self.assertIsNotNone(str(e))
|
||||
|
||||
# Record a 300
|
||||
mock_request.assert_called_once_with(
|
||||
'GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'})
|
||||
|
||||
def test_fake_json_request(self, mock_request):
|
||||
headers = {'User-Agent': 'python-karborclient'}
|
||||
mock_request.side_effect = [socket.gaierror]
|
||||
|
||||
client = http.HTTPClient('fake://example.com:8082')
|
||||
self.assertRaises(exc.EndpointException,
|
||||
client._http_request, "/", "GET")
|
||||
mock_request.assert_called_once_with('GET', 'fake://example.com:8082/',
|
||||
allow_redirects=False,
|
||||
headers=headers)
|
||||
|
||||
def test_http_request_socket_error(self, mock_request):
|
||||
headers = {'User-Agent': 'python-karborclient'}
|
||||
mock_request.side_effect = [socket.gaierror]
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
self.assertRaises(exc.EndpointException,
|
||||
client._http_request, "/", "GET")
|
||||
mock_request.assert_called_once_with('GET', 'http://example.com:8082/',
|
||||
allow_redirects=False,
|
||||
headers=headers)
|
||||
|
||||
def test_http_request_socket_timeout(self, mock_request):
|
||||
headers = {'User-Agent': 'python-karborclient'}
|
||||
mock_request.side_effect = [socket.timeout]
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082')
|
||||
self.assertRaises(exc.ConnectionRefused,
|
||||
client._http_request, "/", "GET")
|
||||
mock_request.assert_called_once_with('GET', 'http://example.com:8082/',
|
||||
allow_redirects=False,
|
||||
headers=headers)
|
||||
|
||||
def test_http_request_specify_timeout(self, mock_request):
|
||||
mock_request.return_value = \
|
||||
fakes.FakeHTTPResponse(
|
||||
200, 'OK',
|
||||
{'content-type': 'application/json'},
|
||||
'{}')
|
||||
|
||||
client = http.HTTPClient('http://example.com:8082', timeout='123')
|
||||
resp, body = client.json_request('GET', '')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
self.assertEqual({}, body)
|
||||
mock_request.assert_called_once_with(
|
||||
'GET', 'http://example.com:8082',
|
||||
allow_redirects=False,
|
||||
headers={'Content-Type': 'application/json',
|
||||
'User-Agent': 'python-karborclient'},
|
||||
timeout=float(123))
|
||||
|
||||
def test_get_system_ca_file(self, mock_request):
|
||||
chosen = '/etc/ssl/certs/ca-certificates.crt'
|
||||
with mock.patch('os.path.exists') as mock_os:
|
||||
mock_os.return_value = chosen
|
||||
|
||||
ca = http.get_system_ca_file()
|
||||
self.assertEqual(chosen, ca)
|
||||
|
||||
mock_os.assert_called_once_with(chosen)
|
||||
|
||||
def test_insecure_verify_cert_None(self, mock_request):
|
||||
client = http.HTTPClient('https://foo', insecure=True)
|
||||
self.assertFalse(client.verify_cert)
|
||||
|
||||
def test_passed_cert_to_verify_cert(self, mock_request):
|
||||
client = http.HTTPClient('https://foo', cacert="NOWHERE")
|
||||
self.assertEqual("NOWHERE", client.verify_cert)
|
||||
|
||||
with mock.patch('karborclient.common.http.get_system_ca_file') as gsf:
|
||||
gsf.return_value = "SOMEWHERE"
|
||||
client = http.HTTPClient('https://foo')
|
||||
self.assertEqual("SOMEWHERE", client.verify_cert)
|
@ -1,23 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Tests for `karborclient` module.
|
||||
"""
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
|
||||
|
||||
class TestKarborclient(base.TestCaseShell):
|
||||
|
||||
def test_something(self):
|
||||
pass
|
@ -1,207 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
import re
|
||||
import sys
|
||||
|
||||
import fixtures
|
||||
from keystoneauth1 import fixture
|
||||
from keystoneauth1.fixture import v2 as ks_v2_fixture
|
||||
import mock
|
||||
from oslo_log import handlers
|
||||
from oslo_log import log
|
||||
import six
|
||||
from testtools import matchers
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
import karborclient.shell
|
||||
from karborclient.tests.unit import base
|
||||
|
||||
|
||||
FAKE_ENV = {'OS_USERNAME': 'username',
|
||||
'OS_PASSWORD': 'password',
|
||||
'OS_TENANT_NAME': 'tenant_name',
|
||||
'OS_AUTH_URL': 'http://no.where/v2.0'}
|
||||
|
||||
FAKE_ENV2 = {'OS_USERNAME': 'username',
|
||||
'OS_PASSWORD': 'password',
|
||||
'OS_TENANT_ID': 'tenant_id',
|
||||
'OS_AUTH_URL': 'http://no.where/v2.0'}
|
||||
|
||||
FAKE_ENV_v3 = {'OS_USERNAME': 'username',
|
||||
'OS_PASSWORD': 'password',
|
||||
'OS_TENANT_ID': 'tenant_id',
|
||||
'OS_USER_DOMAIN_NAME': 'domain_name',
|
||||
'OS_AUTH_URL': 'http://no.where/v3'}
|
||||
|
||||
|
||||
def _create_ver_list(versions):
|
||||
return {'versions': {'values': versions}}
|
||||
|
||||
|
||||
class TestArgs(object):
|
||||
package_version = ''
|
||||
karbor_repo_url = 'http://127.0.0.1'
|
||||
exists_action = ''
|
||||
is_public = False
|
||||
categories = []
|
||||
|
||||
|
||||
class ShellTest(base.TestCaseShell):
|
||||
|
||||
def make_env(self, exclude=None, fake_env=FAKE_ENV):
|
||||
env = dict((k, v) for k, v in fake_env.items() if k != exclude)
|
||||
self.useFixture(fixtures.MonkeyPatch('os.environ', env))
|
||||
|
||||
|
||||
class ShellCommandTest(ShellTest):
|
||||
|
||||
_msg_no_tenant_project = ('You must provide a project name or project'
|
||||
' id via --os-project-name, --os-project-id,'
|
||||
' env[OS_PROJECT_ID] or env[OS_PROJECT_NAME].'
|
||||
' You may use os-project and os-tenant'
|
||||
' interchangeably.',)
|
||||
|
||||
def setUp(self):
|
||||
super(ShellCommandTest, self).setUp()
|
||||
|
||||
def get_auth_endpoint(bound_self, args):
|
||||
return ('test', {})
|
||||
self.useFixture(fixtures.MonkeyPatch(
|
||||
'karborclient.shell.KarborShell._get_endpoint_and_kwargs',
|
||||
get_auth_endpoint))
|
||||
self.client = mock.MagicMock()
|
||||
|
||||
# To prevent log descriptors from being closed during
|
||||
# shell tests set a custom StreamHandler
|
||||
self.logger = log.getLogger(None).logger
|
||||
self.logger.level = logging.DEBUG
|
||||
self.color_handler = handlers.ColorHandler(sys.stdout)
|
||||
self.logger.addHandler(self.color_handler)
|
||||
|
||||
def tearDown(self):
|
||||
super(ShellTest, self).tearDown()
|
||||
self.logger.removeHandler(self.color_handler)
|
||||
|
||||
def shell(self, argstr, exitcodes=(0,)):
|
||||
orig = sys.stdout
|
||||
orig_stderr = sys.stderr
|
||||
try:
|
||||
sys.stdout = six.StringIO()
|
||||
sys.stderr = six.StringIO()
|
||||
_shell = karborclient.shell.KarborShell()
|
||||
_shell.main(argstr.split())
|
||||
except SystemExit:
|
||||
exc_type, exc_value, exc_traceback = sys.exc_info()
|
||||
self.assertIn(exc_value.code, exitcodes)
|
||||
finally:
|
||||
stdout = sys.stdout.getvalue()
|
||||
sys.stdout.close()
|
||||
sys.stdout = orig
|
||||
stderr = sys.stderr.getvalue()
|
||||
sys.stderr.close()
|
||||
sys.stderr = orig_stderr
|
||||
return (stdout, stderr)
|
||||
|
||||
def register_keystone_discovery_fixture(self, mreq):
|
||||
v2_url = "http://no.where/v2.0"
|
||||
v2_version = fixture.V2Discovery(v2_url)
|
||||
mreq.register_uri('GET', v2_url, json=_create_ver_list([v2_version]),
|
||||
status_code=200)
|
||||
|
||||
def register_keystone_token_fixture(self, mreq):
|
||||
v2_token = ks_v2_fixture.Token(token_id='token')
|
||||
service = v2_token.add_service('application-catalog')
|
||||
service.add_endpoint('http://no.where', region='RegionOne')
|
||||
mreq.register_uri('POST',
|
||||
'http://no.where/v2.0/tokens',
|
||||
json=v2_token,
|
||||
status_code=200)
|
||||
|
||||
def test_help_unknown_command(self):
|
||||
self.assertRaises(exceptions.CommandError, self.shell, 'help foofoo')
|
||||
|
||||
def test_help(self):
|
||||
required = [
|
||||
r'.*?^usage: karbor',
|
||||
r'.*?^\s+plan-create\s+Creates a plan.',
|
||||
r'.*?^See "karbor help COMMAND" for help on a specific command',
|
||||
]
|
||||
stdout, stderr = self.shell('help')
|
||||
for r in required:
|
||||
self.assertThat((stdout + stderr),
|
||||
matchers.MatchesRegex(r, re.DOTALL | re.MULTILINE))
|
||||
|
||||
def test_help_on_subcommand(self):
|
||||
required = [
|
||||
r'.*?^usage: karbor plan-create',
|
||||
r'.*?^Creates a plan.',
|
||||
]
|
||||
stdout, stderr = self.shell('help plan-create')
|
||||
for r in required:
|
||||
self.assertThat((stdout + stderr),
|
||||
matchers.MatchesRegex(r, re.DOTALL | re.MULTILINE))
|
||||
|
||||
def test_help_no_options(self):
|
||||
required = [
|
||||
r'.*?^usage: karbor',
|
||||
r'.*?^\s+plan-create\s+Creates a plan',
|
||||
r'.*?^See "karbor help COMMAND" for help on a specific command',
|
||||
]
|
||||
stdout, stderr = self.shell('')
|
||||
for r in required:
|
||||
self.assertThat((stdout + stderr),
|
||||
matchers.MatchesRegex(r, re.DOTALL | re.MULTILINE))
|
||||
|
||||
def test_no_username(self):
|
||||
required = ('You must provide a username via either --os-username or '
|
||||
'env[OS_USERNAME] or a token via --os-auth-token or '
|
||||
'env[OS_AUTH_TOKEN]',)
|
||||
self.make_env(exclude='OS_USERNAME')
|
||||
try:
|
||||
self.shell('plan-list')
|
||||
except exceptions.CommandError as message:
|
||||
self.assertEqual(required, message.args)
|
||||
else:
|
||||
self.fail('CommandError not raised')
|
||||
|
||||
def test_no_tenant_name(self):
|
||||
required = self._msg_no_tenant_project
|
||||
self.make_env(exclude='OS_TENANT_NAME')
|
||||
try:
|
||||
self.shell('plan-list')
|
||||
except exceptions.CommandError as message:
|
||||
self.assertEqual(required, message.args)
|
||||
else:
|
||||
self.fail('CommandError not raised')
|
||||
|
||||
def test_no_tenant_id(self):
|
||||
required = self._msg_no_tenant_project
|
||||
self.make_env(exclude='OS_TENANT_ID', fake_env=FAKE_ENV2)
|
||||
try:
|
||||
self.shell('plan-list')
|
||||
except exceptions.CommandError as message:
|
||||
self.assertEqual(required, message.args)
|
||||
else:
|
||||
self.fail('CommandError not raised')
|
||||
|
||||
def test_no_auth_url(self):
|
||||
required = ('You must provide an auth url'
|
||||
' via either --os-auth-url or via env[OS_AUTH_URL]',)
|
||||
self.make_env(exclude='OS_AUTH_URL')
|
||||
try:
|
||||
self.shell('plan-list')
|
||||
except exceptions.CommandError as message:
|
||||
self.assertEqual(required, message.args)
|
||||
else:
|
||||
self.fail('CommandError not raised')
|
@ -1,122 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
try:
|
||||
import urlparse
|
||||
except ImportError:
|
||||
import urllib.parse as urlparse
|
||||
|
||||
from karborclient.common import http as base_client
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit import fakes
|
||||
from karborclient.v1 import client
|
||||
|
||||
|
||||
REQUEST_ID = 'req-test-request-id'
|
||||
PROJECT_ID = 'efc6a88b-9096-4bb6-8634-cda182a6e12a'
|
||||
|
||||
|
||||
class FakeClient(fakes.FakeClient, client.Client):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs = {
|
||||
'token': 'token',
|
||||
'auth': 'auth_url',
|
||||
'service_type': 'service_type',
|
||||
'endpoint_type': 'endpoint_type',
|
||||
'region_name': 'region_name',
|
||||
'project_id': PROJECT_ID,
|
||||
}
|
||||
client.Client.__init__(self, 'http://endpoint', **kwargs)
|
||||
self.client = self.http_client
|
||||
|
||||
|
||||
class FakeHTTPClient(base_client.HTTPClient):
|
||||
|
||||
def __init__(self, endpoint, **kwargs):
|
||||
super(FakeHTTPClient, self)
|
||||
self.username = 'username'
|
||||
self.password = 'password'
|
||||
self.auth_url = 'auth_url'
|
||||
self.callstack = []
|
||||
self.management_url = 'http://10.0.2.15:8776/v1/fake'
|
||||
self.osapi_max_limit = 1000
|
||||
self.marker = None
|
||||
self.project_id = 'project_id'
|
||||
self.auth_token = 'auth_token'
|
||||
self.region_name = 'region_name'
|
||||
|
||||
def _cs_request(self, url, method, **kwargs):
|
||||
# Check that certain things are called correctly
|
||||
if method in ['GET', 'DELETE']:
|
||||
assert 'body' not in kwargs
|
||||
elif method == 'PUT':
|
||||
assert 'body' in kwargs
|
||||
|
||||
# Call the method
|
||||
args = urlparse.parse_qsl(urlparse.urlparse(url)[4])
|
||||
kwargs.update(args)
|
||||
url_split = url.rsplit('?', 1)
|
||||
munged_url = url_split[0]
|
||||
if len(url_split) > 1:
|
||||
parameters = url_split[1]
|
||||
if 'marker' in parameters:
|
||||
self.marker = int(parameters.rsplit('marker=', 1)[1])
|
||||
else:
|
||||
self.marker = None
|
||||
else:
|
||||
self.marker = None
|
||||
munged_url = munged_url.strip('/').replace('/', '_').replace('.', '_')
|
||||
munged_url = munged_url.replace('-', '_')
|
||||
|
||||
callback = "%s_%s" % (method.lower(), munged_url)
|
||||
|
||||
if not hasattr(self, callback):
|
||||
raise AssertionError('Called unknown API method: %s %s, '
|
||||
'expected fakes method name: %s' %
|
||||
(method, url, callback))
|
||||
|
||||
# Note the call
|
||||
self.callstack.append((method, url, kwargs.get('body')))
|
||||
status, headers, body = getattr(self, callback)(**kwargs)
|
||||
# add fake request-id header
|
||||
headers['x-openstack-request-id'] = REQUEST_ID
|
||||
r = base.TestResponse({
|
||||
"status_code": status,
|
||||
"text": body,
|
||||
"headers": headers,
|
||||
})
|
||||
return r, body
|
||||
|
||||
def json_request(self, method, url, **kwargs):
|
||||
return self._cs_request(url, method, **kwargs)
|
||||
|
||||
def get_providers_1234_checkpoints(self, **kwargs):
|
||||
return 200, {}, {"checkpoints": []}
|
||||
|
||||
def get_plans(self, **kwargs):
|
||||
return 200, {}, {"plans": []}
|
||||
|
||||
def get_operation_logs(self, **kwargs):
|
||||
return 200, {}, {"operation_logs": []}
|
||||
|
||||
def get_restores(self, **kwargs):
|
||||
return 200, {}, {"restores": []}
|
||||
|
||||
def get_scheduled_operations(self, **kwargs):
|
||||
return 200, {}, {"operations": []}
|
||||
|
||||
def get_triggers(self, **kwargs):
|
||||
return 200, {}, {"triggers": []}
|
||||
|
||||
def get_verifications(self, **kwargs):
|
||||
return 200, {}, {"verifications": []}
|
@ -1,117 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
cs = fakes.FakeClient()
|
||||
mock_request_return = ({}, {'checkpoint': {}})
|
||||
|
||||
FAKE_PROVIDER_ID = "2220f8b1-975d-4621-a872-fa9afb43cb6c"
|
||||
FAKE_PLAN_ID = "3330f8b1-975d-4621-a872-fa9afb43cb6c"
|
||||
FAKE_CHECKPOINT_ID = "e4381b1a-905e-4fec-8104-b4419ccaf963"
|
||||
|
||||
|
||||
class CheckpointsTest(base.TestCaseShell):
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_checkpoints(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.checkpoints.list(provider_id=FAKE_PROVIDER_ID)
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/providers/{provider_id}/checkpoints'.format(
|
||||
provider_id=FAKE_PROVIDER_ID), headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_checkpoints_with_all_tenants(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.checkpoints.list(provider_id=FAKE_PROVIDER_ID,
|
||||
search_opts={'all_tenants': 1})
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/providers/{provider_id}/checkpoints?all_tenants=1'.format(
|
||||
provider_id=FAKE_PROVIDER_ID), headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_get_checkpoint(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.checkpoints.get(FAKE_PROVIDER_ID, '1')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/providers/{provider_id}/checkpoints/1'.format(
|
||||
provider_id=FAKE_PROVIDER_ID), headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.raw_request')
|
||||
def test_delete_checkpoint(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.checkpoints.delete(FAKE_PROVIDER_ID, '1')
|
||||
mock_request.assert_called_with(
|
||||
'DELETE',
|
||||
'/providers/{provider_id}/checkpoints/1'.format(
|
||||
provider_id=FAKE_PROVIDER_ID), headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_checkpoints_with_marker_limit(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.checkpoints.list(provider_id=FAKE_PROVIDER_ID,
|
||||
marker=1234, limit=2)
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/providers/{provider_id}/'
|
||||
'checkpoints?limit=2&marker=1234'.format(
|
||||
provider_id=FAKE_PROVIDER_ID), headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_checkpoints_with_sort_key_dir(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.checkpoints.list(provider_id=FAKE_PROVIDER_ID,
|
||||
sort_key='id', sort_dir='asc')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/providers/{provider_id}/'
|
||||
'checkpoints?sort_dir=asc&sort_key=id'.format(
|
||||
provider_id=FAKE_PROVIDER_ID), headers={})
|
||||
|
||||
def test_list_checkpoints_with_invalid_sort_key(self):
|
||||
self.assertRaises(ValueError,
|
||||
cs.checkpoints.list, FAKE_PROVIDER_ID,
|
||||
sort_key='invalid', sort_dir='asc')
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_create_checkpoint(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.checkpoints.create(FAKE_PROVIDER_ID, FAKE_PLAN_ID)
|
||||
mock_request.assert_called_with(
|
||||
'POST',
|
||||
'/providers/{provider_id}/'
|
||||
'checkpoints'.format(
|
||||
provider_id=FAKE_PROVIDER_ID),
|
||||
data={
|
||||
'checkpoint': {'plan_id': FAKE_PLAN_ID, 'extra-info': None}},
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_reset_checkpoint_state(self, mock_request):
|
||||
mock_request.return_value = ({}, {})
|
||||
cs.checkpoints.reset_state(
|
||||
FAKE_PROVIDER_ID, FAKE_CHECKPOINT_ID, 'error')
|
||||
mock_request.assert_called_with(
|
||||
'PUT',
|
||||
'/providers/{provider_id}/checkpoints/{checkpoint_id}'.format(
|
||||
provider_id=FAKE_PROVIDER_ID,
|
||||
checkpoint_id=FAKE_CHECKPOINT_ID
|
||||
),
|
||||
data={'os-resetState': {'state': 'error'}},
|
||||
headers={})
|
@ -1,79 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
cs = fakes.FakeClient()
|
||||
mock_request_return = ({}, {'operation_log': {}})
|
||||
|
||||
|
||||
class OperationLogsTest(base.TestCaseShell):
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_operation_logs(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.operation_logs.list()
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/operation_logs', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_operation_logs_with_all_tenants(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.operation_logs.list(search_opts={'all_tenants': 1})
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/operation_logs?all_tenants=1', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_operation_logs_with_marker_limit(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.operation_logs.list(marker=1234, limit=2)
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/operation_logs?limit=2&marker=1234', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_operation_logs_with_sort_key_dir(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.operation_logs.list(sort_key='id', sort_dir='asc')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/operation_logs?'
|
||||
'sort_dir=asc&sort_key=id', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_operation_logs_with_invalid_sort_key(self, mock_request):
|
||||
self.assertRaises(ValueError,
|
||||
cs.operation_logs.list,
|
||||
sort_key='invalid', sort_dir='asc')
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_operation_log(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.operation_logs.get('1')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/operation_logs/1',
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_operation_log_with_headers(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.operation_logs.get('1', session_id='fake_session_id')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/operation_logs/1',
|
||||
headers={'X-Configuration-Session': 'fake_session_id'})
|
@ -1,97 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
cs = fakes.FakeClient()
|
||||
mock_request_return = ({}, {'plan': {'name': 'fake_name'}})
|
||||
|
||||
|
||||
class PlansTest(base.TestCaseShell):
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_plans_with_marker_limit(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.plans.list(marker=1234, limit=2)
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/plans?limit=2&marker=1234'.format(
|
||||
project_id=fakes.PROJECT_ID), headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_plans_with_sort_key_dir(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.plans.list(sort_key='id', sort_dir='asc')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/plans?'
|
||||
'sort_dir=asc&sort_key=id'.format(
|
||||
project_id=fakes.PROJECT_ID), headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_plans_with_invalid_sort_key(self, mock_request):
|
||||
self.assertRaises(ValueError,
|
||||
cs.plans.list, sort_key='invalid', sort_dir='asc')
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_create_plan(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.plans.create('Plan name', 'provider_id', '', "", '')
|
||||
mock_request.assert_called_with(
|
||||
'POST',
|
||||
'/plans',
|
||||
data={
|
||||
'plan': {'provider_id': 'provider_id',
|
||||
'name': 'Plan name',
|
||||
'resources': '',
|
||||
'parameters': '',
|
||||
'description': ''}},
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.raw_request')
|
||||
def test_delete_plan(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.plans.delete('1')
|
||||
mock_request.assert_called_with(
|
||||
'DELETE',
|
||||
'/plans/1',
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_create_update(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.plans.update('1', {'name': 'Test name.'})
|
||||
mock_request.assert_called_with(
|
||||
'PUT',
|
||||
'/plans/1',
|
||||
data={'plan': {'name': 'Test name.'}}, headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_plan(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.plans.get('1')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/plans/1',
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_plan_with_headers(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.plans.get('1', session_id='fake_session_id')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/plans/1',
|
||||
headers={'X-Configuration-Session': 'fake_session_id'})
|
@ -1,83 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
cs = fakes.FakeClient()
|
||||
mock_request_return = ({}, {'protectable_type': {}})
|
||||
mock_instances_request_return = ({}, {'instances': {}})
|
||||
mock_instance_request_return = ({}, {'instance': {}})
|
||||
|
||||
|
||||
class ProtectablesTest(base.TestCaseShell):
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_protectables(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.protectables.list()
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/protectables', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_get_protectables(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.protectables.get('OS::Cinder::Volume')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/protectables/OS::Cinder::Volume', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_get_protectables_instance(self, mock_request):
|
||||
mock_request.return_value = mock_instance_request_return
|
||||
cs.protectables.get_instance('OS::Cinder::Volume', '1')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/protectables/OS::Cinder::Volume/'
|
||||
'instances/1', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_protectables_instances(self, mock_request):
|
||||
mock_request.return_value = mock_instances_request_return
|
||||
cs.protectables.list_instances('OS::Cinder::Volume')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/protectables/OS::Cinder::Volume/'
|
||||
'instances', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_protectables_instances_with_marker_limit(self, mock_request):
|
||||
mock_request.return_value = mock_instances_request_return
|
||||
cs.protectables.list_instances('OS::Cinder::Volume',
|
||||
marker=1234, limit=2)
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/protectables/OS::Cinder::Volume/'
|
||||
'instances?limit=2&marker=1234', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_protectables_instances_with_sort_key_dir(self, mock_request):
|
||||
mock_request.return_value = mock_instances_request_return
|
||||
cs.protectables.list_instances('OS::Cinder::Volume',
|
||||
sort_key='id', sort_dir='asc')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/protectables/OS::Cinder::Volume/'
|
||||
'instances?sort_dir=asc&sort_key=id', headers={})
|
||||
|
||||
def test_list_protectables_instances_with_invalid_sort_key(self):
|
||||
self.assertRaises(ValueError,
|
||||
cs.protectables.list_instances, 'OS::Cinder::Volume',
|
||||
sort_key='invalid', sort_dir='asc')
|
@ -1,61 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
cs = fakes.FakeClient()
|
||||
mock_request_return = ({}, {'provider': {}})
|
||||
|
||||
|
||||
class ProvidersTest(base.TestCaseShell):
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_get_providers(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.providers.get('2220f8b1-975d-4621-a872-fa9afb43cb6c')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/providers/'
|
||||
'2220f8b1-975d-4621-a872-fa9afb43cb6c', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_providers(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.providers.list()
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/providers', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_providers_with_marker_limit(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.providers.list(marker=1234, limit=2)
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/providers?limit=2&marker=1234', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_providers_with_sort_key_dir(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.providers.list(sort_key='id', sort_dir='asc')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/providers?'
|
||||
'sort_dir=asc&sort_key=id', headers={})
|
||||
|
||||
def test_list_providers_with_invalid_sort_key(self):
|
||||
self.assertRaises(ValueError,
|
||||
cs.providers.list,
|
||||
sort_key='invalid', sort_dir='asc')
|
@ -1,58 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
cs = fakes.FakeClient()
|
||||
mock_request_return = ({}, {'quota_class': {'plans': 50}})
|
||||
|
||||
|
||||
class QuotaClassesTest(base.TestCaseShell):
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_quota_class_update(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.quota_classes.update('default', {'plans': 50})
|
||||
mock_request.assert_called_with(
|
||||
'PUT',
|
||||
'/quota_classes/default',
|
||||
data={'quota_class': {'plans': 50}}, headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_quota_class_update_with_none(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.quota_classes.update('default', {'plans': None})
|
||||
mock_request.assert_called_with(
|
||||
'PUT',
|
||||
'/quota_classes/default',
|
||||
data={'quota_class': {'plans': 50}}, headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_quota_class(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.quota_classes.get('default')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/quota_classes/default',
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_quota_class_with_headers(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.quota_classes.get('default', session_id='fake_session_id')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/quota_classes/default',
|
||||
headers={'X-Configuration-Session': 'fake_session_id'})
|
@ -1,88 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
cs = fakes.FakeClient()
|
||||
mock_request_return = ({}, {'quota': {'plans': 50}})
|
||||
|
||||
|
||||
class QuotasTest(base.TestCaseShell):
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_quota_update(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.quotas.update(fakes.PROJECT_ID, {'plans': 50})
|
||||
mock_request.assert_called_with(
|
||||
'PUT',
|
||||
'/quotas/{project_id}'.format(project_id=fakes.PROJECT_ID),
|
||||
data={'quota': {'plans': 50}}, headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_quota_update_with_none(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.quotas.update(fakes.PROJECT_ID, {'plans': None})
|
||||
mock_request.assert_called_with(
|
||||
'PUT',
|
||||
'/quotas/{project_id}'.format(project_id=fakes.PROJECT_ID),
|
||||
data={'quota': {'plans': 50}}, headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_quota(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.quotas.get(fakes.PROJECT_ID, detail=False)
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/quotas/{project_id}'.format(project_id=fakes.PROJECT_ID),
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_quota_with_headers(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.quotas.get(fakes.PROJECT_ID, False, session_id='fake_session_id')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/quotas/{project_id}'.format(project_id=fakes.PROJECT_ID),
|
||||
headers={'X-Configuration-Session': 'fake_session_id'})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_quota_with_detail(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.quotas.get(fakes.PROJECT_ID, detail=True)
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/quotas/{project_id}/detail'.format(
|
||||
project_id=fakes.PROJECT_ID),
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_quota_with_default(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.quotas.defaults(fakes.PROJECT_ID)
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/quotas/{project_id}/defaults'.format(
|
||||
project_id=fakes.PROJECT_ID),
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_quota_default_with_headers(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.quotas.defaults(fakes.PROJECT_ID, session_id='fake_session_id')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/quotas/{project_id}/defaults'.format(
|
||||
project_id=fakes.PROJECT_ID),
|
||||
headers={'X-Configuration-Session': 'fake_session_id'})
|
@ -1,85 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
cs = fakes.FakeClient()
|
||||
mock_request_return = ({}, {'restore': {}})
|
||||
|
||||
|
||||
class RestoresTest(base.TestCaseShell):
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_restores_with_marker_limit(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.restores.list(marker=1234, limit=2)
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/restores?limit=2&marker=1234', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_restores_with_sort_key_dir(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.restores.list(sort_key='id', sort_dir='asc')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/restores?'
|
||||
'sort_dir=asc&sort_key=id', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_plans_with_invalid_sort_key(self, mock_request):
|
||||
self.assertRaises(ValueError,
|
||||
cs.restores.list, sort_key='invalid', sort_dir='asc')
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_create_restore(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.restores.create('586cc6ce-e286-40bd-b2b5-dd32694d9944',
|
||||
'2220f8b1-975d-4621-a872-fa9afb43cb6c',
|
||||
'192.168.1.2:35357/v2.0',
|
||||
'{}',
|
||||
'{"type": "password", "username": "admin", '
|
||||
'"password": "test"}')
|
||||
mock_request.assert_called_with(
|
||||
'POST',
|
||||
'/restores',
|
||||
data={
|
||||
'restore':
|
||||
{
|
||||
'checkpoint_id': '2220f8b1-975d-4621-a872-fa9afb43cb6c',
|
||||
'parameters': '{}',
|
||||
'provider_id': '586cc6ce-e286-40bd-b2b5-dd32694d9944',
|
||||
'restore_target': '192.168.1.2:35357/v2.0',
|
||||
'restore_auth': '{"type": "password", "username": '
|
||||
'"admin", "password": "test"}'
|
||||
}}, headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_restore(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.restores.get('1')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/restores/1',
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_restore_with_headers(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.restores.get('1', session_id='fake_session_id')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/restores/1',
|
||||
headers={'X-Configuration-Session': 'fake_session_id'})
|
@ -1,91 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
cs = fakes.FakeClient()
|
||||
mock_request_return = ({}, {'scheduled_operation': {'name': 'fake_name'}})
|
||||
|
||||
|
||||
class ScheduledOperationsTest(base.TestCaseShell):
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_scheduled_operations_with_marker_limit(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.scheduled_operations.list(marker=1234, limit=2)
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/scheduled_operations?limit=2&marker=1234', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list__scheduled_operations_with_sort_key_dir(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.scheduled_operations.list(sort_key='id', sort_dir='asc')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/scheduled_operations?'
|
||||
'sort_dir=asc&sort_key=id', headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_scheduled_operations_with_invalid_sort_key(self,
|
||||
mock_request):
|
||||
self.assertRaises(ValueError,
|
||||
cs.scheduled_operations.list, sort_key='invalid',
|
||||
sort_dir='asc')
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_create_scheduled_operation(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.scheduled_operations.create(
|
||||
'name', 'operation_type',
|
||||
'efc6a88b-9096-4bb6-8634-cda182a6e12a',
|
||||
'operation_definition')
|
||||
mock_request.assert_called_with(
|
||||
'POST',
|
||||
'/scheduled_operations',
|
||||
data={
|
||||
'scheduled_operation': {
|
||||
'name': 'name',
|
||||
'operation_type': 'operation_type',
|
||||
'trigger_id': 'efc6a88b-9096-4bb6-8634-cda182a6e12a',
|
||||
'operation_definition': 'operation_definition'}},
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.raw_request')
|
||||
def test_delete_scheduled_operation(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.scheduled_operations.delete('1')
|
||||
mock_request.assert_called_with(
|
||||
'DELETE',
|
||||
'/scheduled_operations/1',
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_scheduled_operation(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.scheduled_operations.get('1')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/scheduled_operations/1',
|
||||
headers={})
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_show_scheduled_operation_with_headers(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.scheduled_operations.get('1', session_id='fake_session_id')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/scheduled_operations/1',
|
||||
headers={'X-Configuration-Session': 'fake_session_id'})
|
@ -1,104 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import mock
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
cs = fakes.FakeClient()
|
||||
mock_request_return = ({}, {'service': {}})
|
||||
|
||||
|
||||
class ServicesTest(base.TestCaseShell):
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_services(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.services.list()
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/os-services',
|
||||
headers={}
|
||||
)
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_services_with_host(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.services.list(host='fake_host')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/os-services?host=fake_host',
|
||||
headers={}
|
||||
)
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_services_with_binary(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.services.list(binary='fake_binary')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/os-services?binary=fake_binary',
|
||||
headers={}
|
||||
)
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_list_services_with_host_and_binary(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
cs.services.list(host='fake_host', binary='fake_binary')
|
||||
mock_request.assert_called_with(
|
||||
'GET',
|
||||
'/os-services?binary=fake_binary&host=fake_host',
|
||||
headers={}
|
||||
)
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_enable_service(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
body = {
|
||||
'status': 'enabled'
|
||||
}
|
||||
cs.services.enable('1')
|
||||
mock_request.assert_called_with(
|
||||
'PUT',
|
||||
'/os-services/1',
|
||||
data=body,
|
||||
headers={}
|
||||
)
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_disable_service(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
body = {
|
||||
'status': 'disabled'
|
||||
}
|
||||
cs.services.disable('1')
|
||||
mock_request.assert_called_with(
|
||||
'PUT',
|
||||
'/os-services/1',
|
||||
data=body,
|
||||
headers={}
|
||||
)
|
||||
|
||||
@mock.patch('karborclient.common.http.HTTPClient.json_request')
|
||||
def test_disable_service_with_reason(self, mock_request):
|
||||
mock_request.return_value = mock_request_return
|
||||
body = {
|
||||
'status': 'disabled',
|
||||
'disabled_reason': 'fake_reason'
|
||||
}
|
||||
cs.services.disable_log_reason('1', 'fake_reason')
|
||||
mock_request.assert_called_with(
|
||||
'PUT',
|
||||
'/os-services/1',
|
||||
data=body,
|
||||
headers={}
|
||||
)
|
@ -1,137 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import fixtures
|
||||
import mock
|
||||
|
||||
from karborclient import shell
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
FAKE_PROVIDER_ID = '1234'
|
||||
FAKE_ENDPOINT = 'http://127.0.0.1/identity'
|
||||
|
||||
|
||||
class ShellFixture(fixtures.Fixture):
|
||||
def setUp(self):
|
||||
super(ShellFixture, self).setUp()
|
||||
self.shell = shell.KarborShell()
|
||||
|
||||
def tearDown(self):
|
||||
# For some method like test_image_meta_bad_action we are
|
||||
# testing a SystemExit to be thrown and object self.shell has
|
||||
# no time to get instantiated which is OK in this case, so
|
||||
# we make sure the method is there before launching it.
|
||||
if hasattr(self.shell, 'cs'):
|
||||
self.shell.cs.clear_callstack()
|
||||
super(ShellFixture, self).tearDown()
|
||||
|
||||
|
||||
class ShellTest(base.TestCaseShell):
|
||||
|
||||
FAKE_ENV = {
|
||||
'OS_USERNAME': 'username',
|
||||
'OS_PASSWORD': 'password',
|
||||
'OS_TENANT_NAME': 'project_id',
|
||||
'OS_AUTH_URL': 'http://no.where/v2.0',
|
||||
'OS_AUTH_TOKEN': 'fake_token'
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
"""Run before each test."""
|
||||
super(ShellTest, self).setUp()
|
||||
for var in self.FAKE_ENV:
|
||||
self.useFixture(fixtures.EnvironmentVariable(
|
||||
var, self.FAKE_ENV[var]))
|
||||
self.shell = self.useFixture(ShellFixture()).shell
|
||||
|
||||
get_endpoint = mock.MagicMock()
|
||||
get_endpoint.return_value = FAKE_ENDPOINT
|
||||
self.useFixture(fixtures.MonkeyPatch(
|
||||
'keystoneauth1.identity.generic.token.Token.get_endpoint',
|
||||
get_endpoint))
|
||||
self.useFixture(fixtures.MonkeyPatch('karborclient.client.Client',
|
||||
fakes.FakeClient))
|
||||
self.useFixture(fixtures.MonkeyPatch(
|
||||
'karborclient.common.http._construct_http_client',
|
||||
fakes.FakeHTTPClient))
|
||||
|
||||
def run_command(self, cmd):
|
||||
if not isinstance(cmd, list):
|
||||
cmd = cmd.split()
|
||||
self.shell.main(cmd)
|
||||
|
||||
def assert_called(self, method, url, body=None, **kwargs):
|
||||
return self.shell.cs.assert_called(method, url, body, **kwargs)
|
||||
|
||||
def test_checkpoint_list_with_all_tenants(self):
|
||||
self.run_command(
|
||||
'checkpoint-list ' + FAKE_PROVIDER_ID + ' --all-tenants 1')
|
||||
|
||||
self.assert_called('GET',
|
||||
'/providers/1234/'
|
||||
'checkpoints?all_tenants=1')
|
||||
|
||||
def test_checkpoint_list_with_all(self):
|
||||
self.run_command(
|
||||
'checkpoint-list ' + FAKE_PROVIDER_ID + ' --all')
|
||||
self.assert_called('GET',
|
||||
'/providers/1234/'
|
||||
'checkpoints?all_tenants=1')
|
||||
|
||||
def test_plan_list_with_all_tenants(self):
|
||||
self.run_command('plan-list --all-tenants 1')
|
||||
self.assert_called('GET', '/plans?all_tenants=1')
|
||||
|
||||
def test_plan_list_with_all(self):
|
||||
self.run_command('plan-list --all')
|
||||
self.assert_called('GET', '/plans?all_tenants=1')
|
||||
|
||||
def test_resotre_list_with_all_tenants(self):
|
||||
self.run_command('restore-list --all-tenants 1')
|
||||
self.assert_called('GET', '/restores?all_tenants=1')
|
||||
|
||||
def test_resotre_list_with_all(self):
|
||||
self.run_command('restore-list --all')
|
||||
self.assert_called('GET', '/restores?all_tenants=1')
|
||||
|
||||
def test_verification_list_with_all_tenants(self):
|
||||
self.run_command('verification-list --all-tenants 1')
|
||||
self.assert_called('GET', '/verifications?all_tenants=1')
|
||||
|
||||
def test_verification_list_with_all(self):
|
||||
self.run_command('verification-list --all')
|
||||
self.assert_called('GET', '/verifications?all_tenants=1')
|
||||
|
||||
def test_trigger_list_with_all_tenants(self):
    """trigger-list --all-tenants 1 issues all_tenants=1 query."""
    self.run_command('trigger-list --all-tenants 1')
    self.assert_called('GET', '/triggers?all_tenants=1')
|
||||
|
||||
def test_trigger_list_with_all(self):
    """--all is shorthand for --all-tenants 1 on trigger-list."""
    self.run_command('trigger-list --all')
    self.assert_called('GET', '/triggers?all_tenants=1')
|
||||
|
||||
def test_scheduledoperation_list_with_all_tenants(self):
    """scheduledoperation-list --all-tenants 1 issues all_tenants=1."""
    self.run_command('scheduledoperation-list --all-tenants 1')
    self.assert_called('GET', '/scheduled_operations?all_tenants=1')
|
||||
|
||||
def test_scheduledoperation_list_with_all(self):
    """--all is shorthand for --all-tenants 1 on scheduledoperation-list."""
    self.run_command('scheduledoperation-list --all')
    self.assert_called('GET', '/scheduled_operations?all_tenants=1')
|
||||
|
||||
def test_operationlog_list_with_all_tenants(self):
    """operationlog-list --all-tenants 1 issues all_tenants=1 query."""
    self.run_command('operationlog-list --all-tenants 1')
    self.assert_called('GET', '/operation_logs?all_tenants=1')
|
||||
|
||||
def test_operationlog_list_with_all(self):
    """--all is shorthand for --all-tenants 1 on operationlog-list."""
    self.run_command('operationlog-list --all')
    self.assert_called('GET', '/operation_logs?all_tenants=1')
|
@ -1,127 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
# Module-level fake client shared by all TriggersTest cases, plus the
# canned (resp, body) tuple every mocked HTTP request returns.
cs = fakes.FakeClient()
mock_request_return = ({}, {'trigger_info': {'name': 'fake_name'}})
|
||||
|
||||
|
||||
class TriggersTest(base.TestCaseShell):
    """Verify TriggerManager builds the expected HTTP requests.

    HTTPClient.json_request/raw_request are mocked, so only the request
    method, path, query string, body and headers are asserted.
    """

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_list_triggers(self, mock_request):
        mock_request.return_value = mock_request_return
        cs.triggers.list()
        mock_request.assert_called_with(
            'GET',
            '/triggers', headers={})

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_list_triggers_with_all_tenants(self, mock_request):
        mock_request.return_value = mock_request_return
        cs.triggers.list(search_opts={'all_tenants': 1})
        mock_request.assert_called_with(
            'GET',
            '/triggers?all_tenants=1', headers={})

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_list_triggers_with_marker_limit(self, mock_request):
        mock_request.return_value = mock_request_return
        cs.triggers.list(marker=1234, limit=2)
        mock_request.assert_called_with(
            'GET',
            '/triggers?limit=2&marker=1234', headers={})

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_list_triggers_with_sort_key_dir(self, mock_request):
        mock_request.return_value = mock_request_return
        cs.triggers.list(sort_key='id', sort_dir='asc')
        mock_request.assert_called_with(
            'GET',
            '/triggers?'
            'sort_dir=asc&sort_key=id', headers={})

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_list_triggers_with_invalid_sort_key(self, mock_request):
        # An unsupported sort key is rejected client-side, before any
        # request is made.
        self.assertRaises(ValueError,
                          cs.triggers.list, sort_key='invalid',
                          sort_dir='asc')

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_create_trigger(self, mock_request):
        mock_request.return_value = mock_request_return
        cs.triggers.create('name', 'time', {})
        mock_request.assert_called_with(
            'POST',
            '/triggers',
            data={
                'trigger_info': {'name': 'name',
                                 'type': 'time',
                                 'properties': {}}},
            headers={})

    def test_create_trigger_with_invalid_window(self):
        # A non-numeric 'window' property is rejected client-side.
        self.assertRaises(exceptions.CommandError,
                          cs.triggers.create,
                          'name', 'time', {'window': 'fake'})

    @mock.patch('karborclient.common.http.HTTPClient.raw_request')
    def test_delete_trigger(self, mock_request):
        mock_request.return_value = mock_request_return
        cs.triggers.delete('1')
        mock_request.assert_called_with(
            'DELETE',
            '/triggers/1',
            headers={})

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_show_trigger(self, mock_request):
        mock_request.return_value = mock_request_return
        cs.triggers.get('1')
        mock_request.assert_called_with(
            'GET',
            '/triggers/1',
            headers={})

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_show_trigger_with_headers(self, mock_request):
        # A session_id is forwarded as the X-Configuration-Session header.
        mock_request.return_value = mock_request_return
        cs.triggers.get('1', session_id='fake_session_id')
        mock_request.assert_called_with(
            'GET',
            '/triggers/1',
            headers={'X-Configuration-Session': 'fake_session_id'})

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_update_trigger(self, mock_request):
        mock_request.return_value = mock_request_return
        trigger_id = '123'
        data = {"name": "My Trigger",
                "properties": {"pattern": "0 10 * * *", "format": "crontab"}}
        body = {"trigger_info": data}
        cs.triggers.update(trigger_id, data)
        mock_request.assert_called_with(
            'PUT',
            '/triggers/123',
            data=body,
            headers={}
        )

    def test_update_trigger_with_invalid_window(self):
        # A non-numeric 'window' property is rejected client-side.
        trigger_id = '123'
        self.assertRaises(exceptions.CommandError,
                          cs.triggers.update,
                          trigger_id, {'properties': {'window': 'fake'}})
|
@ -1,80 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from karborclient.tests.unit import base
|
||||
from karborclient.tests.unit.v1 import fakes
|
||||
|
||||
# Module-level fake client shared by all VerificationsTest cases, plus
# the canned (resp, body) tuple every mocked HTTP request returns.
cs = fakes.FakeClient()
mock_request_return = ({}, {'verification': {}})
|
||||
|
||||
|
||||
class VerificationsTest(base.TestCaseShell):
    """Verify VerificationManager builds the expected HTTP requests.

    HTTPClient.json_request is mocked, so only the request method, path,
    query string, body and headers are asserted.
    """

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_list_verifications_with_marker_limit(self, mock_request):
        mock_request.return_value = mock_request_return
        cs.verifications.list(marker=1234, limit=2)
        mock_request.assert_called_with(
            'GET',
            '/verifications?limit=2&marker=1234', headers={})

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_list_verifications_with_sort_key_dir(self, mock_request):
        mock_request.return_value = mock_request_return
        cs.verifications.list(sort_key='id', sort_dir='asc')
        mock_request.assert_called_with(
            'GET',
            '/verifications?'
            'sort_dir=asc&sort_key=id', headers={})

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_list_verifications_with_invalid_sort_key(self, mock_request):
        # An unsupported sort key is rejected client-side, before any
        # request is made.
        self.assertRaises(ValueError,
                          cs.verifications.list,
                          sort_key='invalid', sort_dir='asc')

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_create_verification(self, mock_request):
        mock_request.return_value = mock_request_return
        cs.verifications.create('586cc6ce-e286-40bd-b2b5-dd32694d9944',
                                '2220f8b1-975d-4621-a872-fa9afb43cb6c',
                                '{}')
        mock_request.assert_called_with(
            'POST',
            '/verifications',
            data={
                'verification':
                {
                    'checkpoint_id': '2220f8b1-975d-4621-a872-fa9afb43cb6c',
                    'parameters': '{}',
                    'provider_id': '586cc6ce-e286-40bd-b2b5-dd32694d9944'
                }}, headers={})

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_show_verification(self, mock_request):
        mock_request.return_value = mock_request_return
        cs.verifications.get('1')
        mock_request.assert_called_with(
            'GET',
            '/verifications/1',
            headers={})

    @mock.patch('karborclient.common.http.HTTPClient.json_request')
    def test_show_verification_with_headers(self, mock_request):
        # A session_id is forwarded as the X-Configuration-Session header.
        mock_request.return_value = mock_request_return
        cs.verifications.get('1', session_id='fake_session_id')
        mock_request.assert_called_with(
            'GET',
            '/verifications/1',
            headers={'X-Configuration-Session': 'fake_session_id'})
|
@ -1,146 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
|
||||
|
||||
def extract_resources(args):
    """Parse the comma-separated ``--resources`` argument.

    Each item is ``id=type=name`` or ``id=type=name=extra_info`` where
    extra_info, when present, is a JSON document.

    :param args: parsed CLI namespace providing ``resources``.
    :returns: list of resource dicts with keys id/type/name[/extra_info].
    :raises exceptions.CommandError: on any malformed item.
    """
    parsed = []
    for chunk in args.resources.split(','):
        fields = chunk.split('=')
        if '=' not in chunk or len(fields) not in (3, 4):
            raise exceptions.CommandError(
                "Unable to parse parameter resources. "
                "The keys of resource are id , type, name and "
                "extra_info. The extra_info field is optional.")
        entry = dict(zip(['id', 'type', 'name', 'extra_info'], fields))
        extra = entry.get('extra_info')
        if extra:
            # extra_info arrives as a JSON string; decode it in place.
            entry['extra_info'] = jsonutils.loads(extra)
        parsed.append(entry)
    return parsed
|
||||
|
||||
|
||||
def check_resources(cs, resources):
    """Verify every resource in *resources* exists and is protectable.

    :param cs: karbor client whose protectables endpoint is queried.
    :param resources: iterable of dicts with 'type' and 'id' keys.
    :raises exceptions.CommandError: when a resource is missing or invalid.
    """
    for resource in resources:
        res_id = resource["id"]
        try:
            found = cs.protectables.get_instance(resource["type"], res_id)
        except exceptions.NotFound:
            raise exceptions.CommandError(
                "The resource: %s can not be found." % res_id)
        # A None payload means the server knows the type but the
        # instance itself is not usable.
        if found is None:
            raise exceptions.CommandError(
                "The resource: %s is invalid." % res_id)
|
||||
|
||||
|
||||
def extract_parameters(args):
    """Build the per-resource parameters dict from CLI arguments.

    Exactly one of ``--parameters`` (repeatable ``k=v`` groups) or
    ``--parameters-json`` (raw JSON) may be supplied.  Each group must
    name a ``resource_type`` and may name a ``resource_id`` (uuid); the
    result maps ``type`` or ``type#id`` to the remaining key/values.

    :param args: parsed CLI namespace with ``parameters`` and
        ``parameters_json``.
    :returns: dict mapping resource keys to their parameter dicts.
    :raises exceptions.CommandError: on conflicting or malformed input.
    """
    if args.parameters and args.parameters_json:
        raise exceptions.CommandError(
            "Must provide parameters or parameters-json, not both")
    if not (args.parameters or args.parameters_json):
        return {}

    if args.parameters_json:
        return jsonutils.loads(args.parameters_json)

    parameters = {}
    for group in args.parameters:
        res_type = None
        res_id = None
        extracted = {}
        for pair in group.split(','):
            try:
                key, value = pair.split('=')
            except Exception:
                raise exceptions.CommandError(
                    'parameters must be in the form: key1=val1,key2=val2,...'
                )
            if key == "resource_type":
                res_type = value
            elif key == "resource_id":
                if not uuidutils.is_uuid_like(value):
                    raise exceptions.CommandError('resource_id must be a uuid')
                res_id = value
            else:
                extracted[key] = value
        if res_type is None:
            raise exceptions.CommandError(
                'Must specify resource_type for parameters'
            )
        # Key by bare type, or "type#id" when an id was scoped in.
        resource_key = res_type if res_id is None else (
            "%s#%s" % (res_type, res_id))
        parameters[resource_key] = extracted

    return parameters
|
||||
|
||||
|
||||
def extract_instances_parameters(args):
    """Turn repeatable ``key[=value]`` CLI items into a dict.

    An item without '=' maps its key to None.

    :param args: parsed CLI namespace providing ``parameters``.
    :returns: dict of key -> value-or-None.
    """
    collected = {}
    for item in args.parameters:
        key, sep, value = item.partition('=')
        collected[key] = value if sep else None
    return collected
|
||||
|
||||
|
||||
def extract_extra_info(args):
    """Turn repeatable ``key[=value]`` extra-info CLI items into a dict.

    An item without '=' maps its key to None (used for unsetting).

    :param args: parsed CLI namespace providing ``extra_info``.
    :returns: dict of key -> value-or-None.
    """
    info = {}
    for item in args.extra_info:
        key, sep, value = item.partition('=')
        info[key] = value if sep else None
    return info
|
||||
|
||||
|
||||
def extract_properties(args):
    """Parse the comma-separated ``--properties`` argument.

    :param args: parsed CLI namespace providing ``properties`` (a
        ``k=v,k2=v2`` string, or None).
    :returns: dict of property key -> value; empty when unset.
    :raises exceptions.CommandError: on an item without '='.
    """
    props = {}
    if args.properties is None:
        return props
    for item in args.properties.split(','):
        if '=' not in item:
            raise exceptions.CommandError(
                "Unable to parse parameter properties.")
        key, value = item.split('=', 1)
        props[key] = value
    return props
|
||||
|
||||
|
||||
def extract_operation_definition(args):
    """Parse the comma-separated ``--operation-definition`` argument.

    :param args: parsed CLI namespace providing ``operation_definition``
        (a ``k=v,k2=v2`` string).
    :returns: dict of definition key -> value.
    :raises exceptions.CommandError: on an item without '='.
    """
    definition = {}
    for item in args.operation_definition.split(','):
        if '=' not in item:
            raise exceptions.CommandError(
                "Unable to parse parameter operation_definition.")
        key, value = item.split('=', 1)
        definition[key] = value
    return definition
|
@ -1,134 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from six.moves.urllib import parse
|
||||
|
||||
from karborclient.common import base
|
||||
|
||||
|
||||
class Checkpoint(base.Resource):
    """A checkpoint resource returned by the karbor API."""

    def __repr__(self):
        return "<Checkpoint %s>" % self._info

    def get(self):
        """Refresh this checkpoint's details from the server.

        Marks the resource as loaded first so repeated lazy-loads don't
        recurse.  Requires the manager to expose ``get`` and this
        checkpoint to carry a ``protection_plan`` (presumably part of
        the server payload — verify against the API response) from
        which the provider_id is taken; otherwise the refresh is a
        silent no-op.
        """
        self.set_loaded(True)
        if not hasattr(self.manager, 'get'):
            return
        plan = self.protection_plan
        if plan is not None:
            provider_id = plan.get("provider_id")
            new = self.manager.get(provider_id, self.id)
            if new:
                # Merge the freshly fetched attributes into this object.
                self._add_details(new._info)
        else:
            return
|
||||
|
||||
|
||||
class CheckpointManager(base.ManagerWithFind):
    """Manage :class:`Checkpoint` resources under a provider."""

    resource_class = Checkpoint

    def create(self, provider_id, plan_id, checkpoint_extra_info=None):
        """Create a checkpoint for *plan_id* under *provider_id*.

        Note the API expects the dash-spelled 'extra-info' key.
        """
        body = {'checkpoint': {'plan_id': plan_id,
                               'extra-info': checkpoint_extra_info}}
        url = "/providers/{provider_id}/" \
              "checkpoints" .format(provider_id=provider_id)
        return self._create(url, body, 'checkpoint')

    def reset_state(self, provider_id, checkpoint_id, state):
        """Reset a checkpoint's state via the os-resetState action."""
        body = {'os-resetState': {'state': state}}
        return self.update(provider_id, checkpoint_id, body)

    def update(self, provider_id, checkpoint_id, values):
        """PUT *values* to the checkpoint resource."""
        url = '/providers/{provider_id}/checkpoints/{checkpoint_id}'.format(
            provider_id=provider_id, checkpoint_id=checkpoint_id)
        return self._update(url, values)

    def delete(self, provider_id, checkpoint_id):
        """Delete the given checkpoint."""
        path = '/providers/{provider_id}/checkpoints/' \
               '{checkpoint_id}'.format(provider_id=provider_id,
                                        checkpoint_id=checkpoint_id)
        return self._delete(path)

    def get(self, provider_id, checkpoint_id, session_id=None):
        """Fetch one checkpoint.

        A session_id, when given, is sent as the
        X-Configuration-Session header.
        """
        if session_id:
            headers = {'X-Configuration-Session': session_id}
        else:
            headers = {}
        url = '/providers/{provider_id}/checkpoints/' \
              '{checkpoint_id}'.format(provider_id=provider_id,
                                       checkpoint_id=checkpoint_id)
        return self._get(url, response_key="checkpoint", headers=headers)

    def list(self, provider_id=None, search_opts=None, marker=None,
             limit=None, sort_key=None, sort_dir=None, sort=None):
        """Lists all checkpoints.

        :param provider_id: provider owning the checkpoints.
        :param search_opts: Search options to filter out checkpoints.
        :param marker: Begin returning checkpoints that appear later in the
                       checkpoints list.
        :param limit: Maximum number of checkpoints to return.
        :param sort_key: Key to be sorted; deprecated in kilo
        :param sort_dir: Sort direction, should be 'desc' or 'asc'; deprecated
                         in kilo
        :param sort: Sort information
        :rtype: list of :class:`checkpoint`
        """

        url = self._build_checkpoints_list_url(
            provider_id,
            search_opts=search_opts, marker=marker,
            limit=limit, sort_key=sort_key,
            sort_dir=sort_dir, sort=sort)
        return self._list(url, 'checkpoints')

    def _build_checkpoints_list_url(self, provider_id,
                                    search_opts=None, marker=None, limit=None,
                                    sort_key=None, sort_dir=None, sort=None):
        # Assemble the query string from filters + paging + sorting.
        if search_opts is None:
            search_opts = {}

        query_params = {}
        for key, val in search_opts.items():
            if val:
                query_params[key] = val

        if marker:
            query_params['marker'] = marker

        if limit:
            query_params['limit'] = limit

        if sort:
            query_params['sort'] = self._format_sort_param(sort)
        else:
            # sort_key and sort_dir deprecated in kilo, prefer sort
            if sort_key:
                query_params['sort_key'] = self._format_sort_key_param(
                    sort_key)

            if sort_dir:
                query_params['sort_dir'] = self._format_sort_dir_param(
                    sort_dir)

        # Transform the dict to a sequence of two-element tuples in fixed
        # order, then the encoded string will be consistent in Python 2&3.
        query_string = ""
        if query_params:
            params = sorted(query_params.items(), key=lambda x: x[0])
            query_string = "?%s" % parse.urlencode(params)

        return ("/providers/%(provider_id)s"
                "/checkpoints%(query_string)s" %
                {"provider_id": provider_id,
                 "query_string": query_string})
|
@ -1,56 +0,0 @@
|
||||
# Copyright (c) 2013 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from karborclient.common import http
|
||||
from karborclient.v1 import checkpoints
|
||||
from karborclient.v1 import operation_logs
|
||||
from karborclient.v1 import plans
|
||||
from karborclient.v1 import protectables
|
||||
from karborclient.v1 import providers
|
||||
from karborclient.v1 import quota_classes
|
||||
from karborclient.v1 import quotas
|
||||
from karborclient.v1 import restores
|
||||
from karborclient.v1 import scheduled_operations
|
||||
from karborclient.v1 import services
|
||||
from karborclient.v1 import triggers
|
||||
from karborclient.v1 import verifications
|
||||
|
||||
|
||||
class Client(object):
    """Client for the karbor v1 API.

    Exposes one manager attribute per API resource family (plans,
    restores, checkpoints, triggers, ...), all sharing a single HTTP
    client constructed from the given arguments.

    :param string endpoint: A user-supplied endpoint URL for the service.
    :param string token: Token for authentication.
    :param integer timeout: Allows customization of the timeout for client
                            http requests. (optional)
    """

    def __init__(self, *args, **kwargs):
        """Initialize a new client for the karbor v1 API."""
        # All managers share one underlying HTTP client.
        self.http_client = http._construct_http_client(*args, **kwargs)
        self.plans = plans.PlanManager(self.http_client)
        self.restores = restores.RestoreManager(self.http_client)
        self.protectables = protectables.ProtectableManager(self.http_client)
        self.providers = providers.ProviderManager(self.http_client)
        self.checkpoints = checkpoints.CheckpointManager(self.http_client)
        self.triggers = triggers.TriggerManager(self.http_client)
        self.scheduled_operations = \
            scheduled_operations.ScheduledOperationManager(self.http_client)
        self.operation_logs = \
            operation_logs.OperationLogManager(self.http_client)
        self.verifications = verifications.VerificationManager(
            self.http_client)
        self.services = services.ServiceManager(self.http_client)
        self.quotas = quotas.QuotaManager(self.http_client)
        self.quota_classes = quota_classes.QuotaClassManager(self.http_client)
|
@ -1,44 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from karborclient.common import base
|
||||
|
||||
|
||||
class OperationLog(base.Resource):
    """An operation-log resource returned by the karbor API."""

    def __repr__(self):
        return "<OperationLog %s>" % self._info
|
||||
|
||||
|
||||
class OperationLogManager(base.ManagerWithFind):
    """Manage :class:`OperationLog` resources."""

    resource_class = OperationLog

    def get(self, operation_log_id, session_id=None):
        """Fetch one operation log.

        A session_id, when given, is sent as the
        X-Configuration-Session header.
        """
        if session_id:
            headers = {'X-Configuration-Session': session_id}
        else:
            headers = {}
        url = "/operation_logs/{operation_log_id}".format(
            operation_log_id=operation_log_id)
        return self._get(url, response_key="operation_log", headers=headers)

    def list(self, detailed=False, search_opts=None, marker=None, limit=None,
             sort_key=None, sort_dir=None, sort=None):
        """Lists all operation_logs.

        Filtering, paging and sorting arguments mirror the other list
        methods; see :meth:`PlanManager.list` for their semantics.
        """
        resource_type = "operation_logs"
        url = self._build_list_url(
            resource_type, detailed=detailed,
            search_opts=search_opts, marker=marker,
            limit=limit, sort_key=sort_key,
            sort_dir=sort_dir, sort=sort)
        return self._list(url, 'operation_logs')
|
@ -1,79 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from karborclient.common import base
|
||||
|
||||
|
||||
class Plan(base.Resource):
    """A protection-plan resource returned by the karbor API."""

    def __repr__(self):
        return "<Plan %s>" % self._info
|
||||
|
||||
|
||||
class PlanManager(base.ManagerWithFind):
    """Manage :class:`Plan` resources."""

    resource_class = Plan

    def create(self, name, provider_id, resources, parameters,
               description=None):
        """Create a protection plan.

        :param name: display name of the plan.
        :param provider_id: id of the protection provider to use.
        :param resources: list of resource dicts to protect.
        :param parameters: per-resource parameters dict.
        :param description: optional human-readable description.
        """
        body = {'plan': {'name': name,
                         'description': description,
                         'provider_id': provider_id,
                         'resources': resources,
                         'parameters': parameters
                         }}
        url = "/plans"
        return self._create(url, body, 'plan')

    def update(self, plan_id, data):
        """PUT *data* (wrapped in a 'plan' envelope) to the plan."""

        body = {"plan": data}

        return self._update('/plans/{plan_id}'
                            .format(plan_id=plan_id),
                            body, "plan")

    def delete(self, plan_id):
        """Delete the given plan."""
        path = '/plans/{plan_id}'.format(
            plan_id=plan_id)
        return self._delete(path)

    def get(self, plan_id, session_id=None):
        """Fetch one plan.

        A session_id, when given, is sent as the
        X-Configuration-Session header.
        """
        if session_id:
            headers = {'X-Configuration-Session': session_id}
        else:
            headers = {}
        url = "/plans/{plan_id}".format(
            plan_id=plan_id)
        return self._get(url, response_key="plan", headers=headers)

    def list(self, detailed=False, search_opts=None, marker=None, limit=None,
             sort_key=None, sort_dir=None, sort=None):
        """Lists all plans.

        :param detailed: Whether to return detailed volume info.
        :param search_opts: Search options to filter out volumes.
        :param marker: Begin returning volumes that appear later in the volume
                       list than that represented by this volume id.
        :param limit: Maximum number of volumes to return.
        :param sort_key: Key to be sorted; deprecated in kilo
        :param sort_dir: Sort direction, should be 'desc' or 'asc'; deprecated
                         in kilo
        :param sort: Sort information
        :rtype: list of :class:`Plan`
        """

        resource_type = "plans"
        url = self._build_list_url(
            resource_type, detailed=detailed,
            search_opts=search_opts, marker=marker,
            limit=limit, sort_key=sort_key,
            sort_dir=sort_dir, sort=sort)
        return self._list(url, 'plans')
|
@ -1,137 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from six.moves.urllib import parse
|
||||
|
||||
from karborclient.common import base
|
||||
|
||||
|
||||
class Protectable(base.Resource):
    """A protectable-type resource returned by the karbor API."""

    def __repr__(self):
        return "<Protectable %s>" % self._info
|
||||
|
||||
|
||||
class Instances(base.Resource):
    """A protectable-instance resource returned by the karbor API."""

    def __repr__(self):
        return "<Instances %s>" % self._info
|
||||
|
||||
|
||||
class ProtectableManager(base.ManagerWithFind):
    """Manage protectable types and their instances."""

    resource_class = Protectable

    def get(self, protectable_type, session_id=None):
        """Fetch one protectable type.

        A session_id, when given, is sent as the
        X-Configuration-Session header.
        """
        if session_id:
            headers = {'X-Configuration-Session': session_id}
        else:
            headers = {}
        url = "/protectables/{protectable_type}".format(
            protectable_type=protectable_type)
        return self._get(url, response_key="protectable_type", headers=headers)

    def list(self):
        """List all protectable types.

        The server returns bare type names, so each one is wrapped into
        a :class:`Protectable` object here.
        """
        url = "/protectables"
        protectables = self._list(url, 'protectable_type', return_raw=True)

        protectables_list = []
        for protectable in protectables:
            protectable_dict = {}
            protectable_dict['protectable_type'] = protectable
            protectables_list.append(Protectable(self, protectable_dict))
        return protectables_list

    def list_instances(self, protectable_type, search_opts=None, marker=None,
                       limit=None, sort_key=None, sort_dir=None, sort=None):
        """Lists all instances.

        :param protectable_type: protectable type whose instances to list.
        :param search_opts: Search options to filter out instances.
        :param marker: Begin returning volumes that appear later in the
                       instances list.
        :param limit: Maximum number of instances to return.
        :param sort_key: Key to be sorted; deprecated in kilo
        :param sort_dir: Sort direction, should be 'desc' or 'asc'; deprecated
                         in kilo
        :param sort: Sort information
        :rtype: list of :class:`Instances`
        """

        url = self._build_instances_list_url(
            protectable_type,
            search_opts=search_opts, marker=marker,
            limit=limit, sort_key=sort_key,
            sort_dir=sort_dir, sort=sort)
        return self._list(url, response_key='instances', obj_class=Instances)

    def get_instance(self, type, id, search_opts=None, session_id=None):
        """Fetch one protectable instance, with optional filter options.

        Note: 'type' and 'id' shadow builtins; kept for interface
        compatibility.
        """
        if session_id:
            headers = {'X-Configuration-Session': session_id}
        else:
            headers = {}

        if search_opts is None:
            search_opts = {}
        query_params = {}
        for key, val in search_opts.items():
            if val:
                query_params[key] = val
        # Sorted items keep the encoded query consistent across runs.
        query_string = ""
        if query_params:
            params = sorted(query_params.items(), key=lambda x: x[0])
            query_string = "?%s" % parse.urlencode(params)

        url = ("/protectables/{protectable_type}/instances/"
               "{protectable_id}{query_string}").format(
            protectable_type=type, protectable_id=id,
            query_string=query_string)
        return self._get(url, response_key="instance", headers=headers)

    def _build_instances_list_url(self, protectable_type,
                                  search_opts=None, marker=None, limit=None,
                                  sort_key=None, sort_dir=None, sort=None):
        # Assemble the query string from filters + paging + sorting.
        if search_opts is None:
            search_opts = {}

        query_params = {}
        for key, val in search_opts.items():
            if val:
                query_params[key] = val

        if marker:
            query_params['marker'] = marker

        if limit:
            query_params['limit'] = limit

        if sort:
            query_params['sort'] = self._format_sort_param(sort)
        else:
            # sort_key and sort_dir deprecated in kilo, prefer sort
            if sort_key:
                query_params['sort_key'] = self._format_sort_key_param(
                    sort_key)

            if sort_dir:
                query_params['sort_dir'] = self._format_sort_dir_param(
                    sort_dir)

        # Transform the dict to a sequence of two-element tuples in fixed
        # order, then the encoded string will be consistent in Python 2&3.
        query_string = ""
        if query_params:
            params = sorted(query_params.items(), key=lambda x: x[0])
            query_string = "?%s" % parse.urlencode(params)

        return ("/protectables/%(protectable_type)s"
                "/instances%(query_string)s" %
                {"protectable_type": protectable_type,
                 "query_string": query_string})
|
@ -1,55 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from karborclient.common import base
|
||||
|
||||
|
||||
class Provider(base.Resource):
    """A protection provider resource returned by the Karbor API."""

    def __repr__(self):
        # Expose the raw resource payload for easier debugging.
        info = self._info
        return "<Provider %s>" % info
|
||||
|
||||
|
||||
class ProviderManager(base.ManagerWithFind):
    """Manager for provider resources exposed by the Karbor API."""

    resource_class = Provider

    def get(self, provider_id, session_id=None):
        """Fetch a single provider by ID.

        :param provider_id: ID of the provider to retrieve.
        :param session_id: Optional configuration session id, sent as the
            X-Configuration-Session header when given.
        :rtype: :class:`Provider`
        """
        headers = {}
        if session_id:
            headers['X-Configuration-Session'] = session_id
        url = "/providers/{provider_id}".format(provider_id=provider_id)
        return self._get(url, response_key="provider", headers=headers)

    def list(self, detailed=False, search_opts=None, marker=None, limit=None,
             sort_key=None, sort_dir=None, sort=None):
        """Lists all providers.

        :param detailed: Whether to return detailed provider info.
        :param search_opts: Search options to filter out provider.
        :param marker: Begin returning providers that appear later in the
                       provider list than that represented by this id.
        :param limit: Maximum number of providers to return.
        :param sort_key: Key to be sorted; deprecated in kilo
        :param sort_dir: Sort direction, 'desc' or 'asc'; deprecated in kilo
        :param sort: Sort information
        :rtype: list of :class:`Provider`
        """
        url = self._build_list_url(
            "providers", detailed=detailed, search_opts=search_opts,
            marker=marker, limit=limit, sort_key=sort_key,
            sort_dir=sort_dir, sort=sort)
        return self._list(url, 'providers')
|
@ -1,45 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from karborclient.common import base
|
||||
|
||||
|
||||
class QuotaClass(base.Resource):
    """A quota-class resource returned by the Karbor API."""

    def __repr__(self):
        # Expose the raw resource payload for easier debugging.
        info = self._info
        return "<QuotaClass %s>" % info
|
||||
|
||||
|
||||
class QuotaClassManager(base.ManagerWithFind):
    """Manager for quota-class resources."""

    resource_class = QuotaClass

    def list(self):
        # Listing quota classes is not supported by this client.
        pass

    def update(self, class_name, data):
        """Update the quotas of a quota class.

        :param class_name: Name of the quota class to update.
        :param data: Dict of quota values; a ``plans`` entry of None is
            replaced with the default of 50 before sending.
        :returns: The updated :class:`QuotaClass`.
        """
        # Copy so filling in the default does not mutate the caller's
        # dict as a side effect.
        data = dict(data)
        if "plans" in data and data["plans"] is None:
            data["plans"] = 50

        body = {"quota_class": data}

        return self._update('/quota_classes/{class_name}'
                            .format(class_name=class_name),
                            body, "quota_class")

    def get(self, class_name, session_id=None):
        """Fetch the quotas of a quota class.

        :param class_name: Name of the quota class to retrieve.
        :param session_id: Optional configuration session id, sent as the
            X-Configuration-Session header when given.
        :rtype: :class:`QuotaClass`
        """
        if session_id:
            headers = {'X-Configuration-Session': session_id}
        else:
            headers = {}
        url = "/quota_classes/{class_name}".format(
            class_name=class_name)
        return self._get(url, response_key="quota_class", headers=headers)
|
@ -1,58 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from karborclient.common import base
|
||||
|
||||
|
||||
class Quota(base.Resource):
    """A per-project quota resource returned by the Karbor API."""

    def __repr__(self):
        # Expose the raw resource payload for easier debugging.
        info = self._info
        return "<Quota %s>" % info
|
||||
|
||||
|
||||
class QuotaManager(base.ManagerWithFind):
    """Manager for per-project quota resources."""

    resource_class = Quota

    def list(self):
        # Listing quotas is not supported by this client.
        pass

    def update(self, project_id, data):
        """Update the quotas of a project.

        :param project_id: ID of the project to update.
        :param data: Dict of quota values; a ``plans`` entry of None is
            replaced with the default of 50 before sending.
        :returns: The updated :class:`Quota`.
        """
        # Copy so filling in the default does not mutate the caller's
        # dict as a side effect.
        data = dict(data)
        if "plans" in data and data["plans"] is None:
            data["plans"] = 50

        body = {"quota": data}

        return self._update('/quotas/{project_id}'
                            .format(project_id=project_id),
                            body, "quota")

    def get(self, project_id, detail, session_id=None):
        """Fetch the quotas of a project.

        :param project_id: ID of the project.
        :param detail: When truthy, request the detailed quota view.
        :param session_id: Optional configuration session id, sent as the
            X-Configuration-Session header when given.
        :rtype: :class:`Quota`
        """
        if session_id:
            headers = {'X-Configuration-Session': session_id}
        else:
            headers = {}
        base_url = "/quotas/{project_id}".format(
            project_id=project_id)
        if detail:
            url = base_url + '/detail'
        else:
            url = base_url
        return self._get(url, response_key="quota", headers=headers)

    def defaults(self, project_id, session_id=None):
        """Fetch the default quotas of a project.

        :param project_id: ID of the project.
        :param session_id: Optional configuration session id, sent as the
            X-Configuration-Session header when given.
        :rtype: :class:`Quota`
        """
        if session_id:
            headers = {'X-Configuration-Session': session_id}
        else:
            headers = {}
        url = "/quotas/{project_id}/defaults".format(
            project_id=project_id)
        return self._get(url, response_key="quota", headers=headers)
|
@ -1,69 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from karborclient.common import base
|
||||
|
||||
|
||||
class Restore(base.Resource):
    """A restore operation resource returned by the Karbor API."""

    def __repr__(self):
        # Expose the raw resource payload for easier debugging.
        info = self._info
        return "<Restore %s>" % info
|
||||
|
||||
|
||||
class RestoreManager(base.ManagerWithFind):
    """Manager for restore resources."""

    resource_class = Restore

    def create(self, provider_id, checkpoint_id, restore_target, parameters,
               restore_auth):
        """Create a restore from a checkpoint.

        :param provider_id: Provider to restore with.
        :param checkpoint_id: Checkpoint to restore from.
        :param restore_target: Target to restore to.
        :param parameters: Restore parameters dict.
        :param restore_auth: Auth information for the restore target.
        :rtype: :class:`Restore`
        """
        restore = {
            'provider_id': provider_id,
            'checkpoint_id': checkpoint_id,
            'restore_target': restore_target,
            'restore_auth': restore_auth,
            'parameters': parameters,
        }
        return self._create("/restores", {'restore': restore}, 'restore')

    def get(self, restore_id, session_id=None):
        """Fetch a single restore by ID.

        :param restore_id: ID of the restore to retrieve.
        :param session_id: Optional configuration session id, sent as the
            X-Configuration-Session header when given.
        :rtype: :class:`Restore`
        """
        headers = {}
        if session_id:
            headers['X-Configuration-Session'] = session_id
        url = "/restores/{restore_id}".format(restore_id=restore_id)
        return self._get(url, response_key="restore", headers=headers)

    def list(self, detailed=False, search_opts=None, marker=None, limit=None,
             sort_key=None, sort_dir=None, sort=None):
        """Lists all restores.

        :param detailed: Whether to return detailed restore info.
        :param search_opts: Search options to filter out restores.
        :param marker: Begin returning restores that appear later in the
                       restore list than that represented by this id.
        :param limit: Maximum number of restores to return.
        :param sort_key: Key to be sorted; deprecated in kilo
        :param sort_dir: Sort direction, 'desc' or 'asc'; deprecated in kilo
        :param sort: Sort information
        :rtype: list of :class:`Restore`
        """
        url = self._build_list_url(
            "restores", detailed=detailed, search_opts=search_opts,
            marker=marker, limit=limit, sort_key=sort_key,
            sort_dir=sort_dir, sort=sort)
        return self._list(url, 'restores')
|
@ -1,59 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from karborclient.common import base
|
||||
|
||||
|
||||
class ScheduledOperation(base.Resource):
    """A scheduled operation resource returned by the Karbor API."""

    def __repr__(self):
        # Expose the raw resource payload for easier debugging.
        info = self._info
        return "<ScheduledOperation %s>" % info
|
||||
|
||||
|
||||
class ScheduledOperationManager(base.ManagerWithFind):
    """Manager for scheduled-operation resources."""

    resource_class = ScheduledOperation

    def create(self, name, operation_type, trigger_id, operation_definition):
        """Create a scheduled operation bound to a trigger.

        :param name: Display name of the operation.
        :param operation_type: Type of operation to run.
        :param trigger_id: Trigger that schedules the operation.
        :param operation_definition: Dict describing the operation.
        :rtype: :class:`ScheduledOperation`
        """
        operation = {
            'name': name,
            'operation_type': operation_type,
            'trigger_id': trigger_id,
            'operation_definition': operation_definition,
        }
        body = {'scheduled_operation': operation}
        return self._create("/scheduled_operations", body,
                            'scheduled_operation')

    def delete(self, scheduled_operation_id):
        """Delete a scheduled operation by ID."""
        path = '/scheduled_operations/{scheduled_operation_id}'.format(
            scheduled_operation_id=scheduled_operation_id)
        return self._delete(path)

    def get(self, scheduled_operation_id, session_id=None):
        """Fetch a single scheduled operation by ID.

        :param scheduled_operation_id: ID of the operation to retrieve.
        :param session_id: Optional configuration session id, sent as the
            X-Configuration-Session header when given.
        :rtype: :class:`ScheduledOperation`
        """
        headers = {}
        if session_id:
            headers['X-Configuration-Session'] = session_id
        url = "/scheduled_operations/{scheduled_operation_id}".format(
            scheduled_operation_id=scheduled_operation_id)
        return self._get(url, response_key="scheduled_operation",
                         headers=headers)

    def list(self, detailed=False, search_opts=None, marker=None, limit=None,
             sort_key=None, sort_dir=None, sort=None):
        """Lists all scheduled_operations."""
        url = self._build_list_url(
            "scheduled_operations", detailed=detailed,
            search_opts=search_opts, marker=marker, limit=limit,
            sort_key=sort_key, sort_dir=sort_dir, sort=sort)
        return self._list(url, 'operations')
|
@ -1,64 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from karborclient.common import base
|
||||
|
||||
|
||||
class Service(base.Resource):
    """A karbor service resource returned by the Karbor API."""

    def __repr__(self):
        # Expose the raw resource payload for easier debugging.
        info = self._info
        return "<Service %s>" % info
|
||||
|
||||
|
||||
class ServiceManager(base.ManagerWithFind):
    """Manager for karbor service resources."""

    resource_class = Service

    def _set_status(self, service_id, body):
        # All status changes go through the same os-services endpoint.
        return self._update('/os-services/%s' % service_id, body, "service")

    def enable(self, service_id):
        """Enable the service specified by the service ID

        :param service_id: The ID of the service to enable.
        """
        return self._set_status(service_id, {'status': 'enabled'})

    def disable(self, service_id):
        """Disable the service specified by the service ID.

        :param service_id: The ID of the service to disable.
        """
        return self._set_status(service_id, {'status': 'disabled'})

    def disable_log_reason(self, service_id, reason):
        """Disable the service with a reason.

        :param service_id: The ID of the service to disable.
        :param reason: The reason for disabling a service.
        """
        body = {
            'status': 'disabled',
            'disabled_reason': reason,
        }
        return self._set_status(service_id, body)

    def list(self, host=None, binary=None):
        """Lists all services."""
        url = self._build_list_url(
            "os-services",
            search_opts={'host': host, 'binary': binary})
        return self._list(url, 'services')
|
File diff suppressed because it is too large
Load Diff
@ -1,77 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from karborclient.common.apiclient import exceptions
|
||||
from karborclient.common import base
|
||||
|
||||
|
||||
class Trigger(base.Resource):
    """A trigger resource returned by the Karbor API."""

    def __repr__(self):
        # Expose the raw resource payload for easier debugging.
        info = self._info
        return "<Trigger %s>" % info
|
||||
|
||||
|
||||
class TriggerManager(base.ManagerWithFind):
    """Manager for trigger resources."""

    resource_class = Trigger

    @staticmethod
    def _coerce_window(properties):
        """Coerce a truthy ``window`` property to int, in place.

        :raises exceptions.CommandError: if the value cannot be converted.
        """
        if properties.get('window', None):
            try:
                properties['window'] = int(properties['window'])
            # Narrowed from a bare ``except Exception`` — int() only raises
            # these for bad input; anything else should propagate.
            except (TypeError, ValueError):
                msg = 'The trigger window is not integer'
                raise exceptions.CommandError(msg)

    def create(self, name, type, properties):
        """Create a trigger.

        :param name: Display name of the trigger.
        :param type: Trigger type (e.g. a time trigger).
        :param properties: Trigger properties; a truthy ``window`` entry
            is validated and converted to int.
        :rtype: :class:`Trigger`
        """
        # Copy so the window coercion does not mutate the caller's dict.
        properties = dict(properties)
        self._coerce_window(properties)
        body = {'trigger_info': {'name': name,
                                 'type': type,
                                 'properties': properties,
                                 }}
        url = "/triggers"
        return self._create(url, body, 'trigger_info')

    def delete(self, trigger_id):
        """Delete a trigger by ID."""
        path = '/triggers/{trigger_id}'.format(
            trigger_id=trigger_id)
        return self._delete(path)

    def get(self, trigger_id, session_id=None):
        """Fetch a single trigger by ID.

        :param trigger_id: ID of the trigger to retrieve.
        :param session_id: Optional configuration session id, sent as the
            X-Configuration-Session header when given.
        :rtype: :class:`Trigger`
        """
        if session_id:
            headers = {'X-Configuration-Session': session_id}
        else:
            headers = {}
        url = "/triggers/{trigger_id}".format(
            trigger_id=trigger_id)
        return self._get(url, response_key="trigger_info", headers=headers)

    def update(self, trigger_id, data):
        """Update a trigger.

        :param trigger_id: ID of the trigger to update.
        :param data: Dict with a ``properties`` dict whose truthy
            ``window`` entry is validated and converted to int.
        :returns: The updated :class:`Trigger`.
        """
        # Shallow-copy data and its properties so the window coercion
        # does not mutate the caller's nested dict.
        data = dict(data)
        data['properties'] = dict(data['properties'])
        self._coerce_window(data['properties'])
        body = {"trigger_info": data}
        return self._update('/triggers/{trigger_id}'
                            .format(trigger_id=trigger_id),
                            body, "trigger_info")

    def list(self, detailed=False, search_opts=None, marker=None, limit=None,
             sort_key=None, sort_dir=None, sort=None):
        """Lists all triggers."""
        url = self._build_list_url(
            "triggers", detailed=detailed,
            search_opts=search_opts, marker=marker,
            limit=limit, sort_key=sort_key,
            sort_dir=sort_dir, sort=sort)
        return self._list(url, 'triggers')
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user