Retire Packaging Deb project repos
This commit is part of a series to retire the Packaging Deb project. Step 2 is to remove all content from the project repos and replace it with a README that explains where ongoing work now happens and how to recover the repo content if it is needed at some future point (as described in https://docs.openstack.org/infra/manual/drivers.html#retiring-a-project).

Change-Id: I016fb3710b0de757d2313c16114777294118c85a
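The new README added by this commit points readers at the Git history for the old content; a minimal sketch of getting it back from a local clone of this repository (standard git commands, following the README's own instructions):

    git log --oneline -2            # shows this retirement commit and its parent
    git checkout HEAD^1             # inspect the tree as it was before retirement
    git checkout -b restore HEAD^1  # or resurrect it on a new working branch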
parent b204c8acef    commit 40c2d21c9d
66  .gitignore  vendored
@@ -1,66 +0,0 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
*.eggs

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
cover/
htmlcov/
.tox/
.coverage
.cache
.testrepository
nosetests.xml
coverage.xml

# Translations
*.mo

# Django stuff:
*.log

# Sphinx documentation
doc/build/

# PyBuilder
target/

# Files created by releasenotes build
releasenotes/build

# pbr generated files
AUTHORS
ChangeLog

# swap file
*.swp
@@ -1,4 +0,0 @@
[gerrit]
host=review.openstack.org
port=29418
project=openstack/python-senlinclient.git
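The .gitreview file above is consumed by the git-review tool; a rough sketch of the manual Gerrit submission it automates (the <username> is a placeholder for a Gerrit account):

    git remote add gerrit ssh://<username>@review.openstack.org:29418/openstack/python-senlinclient.git
    git push gerrit HEAD:refs/for/master   # submit the current commit for review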
@@ -1,4 +0,0 @@
[DEFAULT]
test_command=${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list
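To illustrate the .testr.conf above: the test runner substitutes $LISTOPT and $IDOPTION before invoking the command, so the effective calls look roughly like this (a sketch, not the exact testr internals; the id-file path is hypothetical):

    python -m subunit.run discover -t ./ . --list                      # enumerate test ids
    python -m subunit.run discover -t ./ . --load-list /tmp/test-ids   # run a selected partition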
@@ -1,16 +0,0 @@
If you would like to contribute to the development of OpenStack,
you must follow the steps documented at:

   https://docs.openstack.org/infra/manual/developers.html

Once those steps have been completed, changes to OpenStack
should be submitted for review via the Gerrit tool, following
the workflow documented at:

   https://docs.openstack.org/infra/manual/developers.html#development-workflow

Pull requests submitted through GitHub will be ignored.

Bugs should be filed on Launchpad, not GitHub:

   https://bugs.launchpad.net/python-senlinclient
175  LICENSE
@@ -1,175 +0,0 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
14  README  Normal file
@@ -0,0 +1,14 @@
This project is no longer maintained.

The contents of this repository are still available in the Git
source code management system. To see the contents of this
repository before it reached its end of life, please check out the
previous commit with "git checkout HEAD^1".

For ongoing work on maintaining OpenStack packages in the Debian
distribution, please see the Debian OpenStack packaging team at
https://wiki.debian.org/OpenStack/.

For any further questions, please email
openstack-dev@lists.openstack.org or join #openstack-dev on
Freenode.
19  README.rst
@@ -1,19 +0,0 @@
========================
Team and repository tags
========================

.. image:: https://governance.openstack.org/tc/badges/python-senlinclient.svg
    :target: https://governance.openstack.org/tc/reference/tags/index.html

.. Change things from this point on

Python bindings to the Senlin Clustering API
============================================

This is a client library for Senlin built on the Senlin clustering API. It
provides a Python API (the ``senlinclient`` module) and a command-line tool
(``senlin``).

Development takes place via the usual OpenStack processes as outlined in the
`developer guide <http://docs.openstack.org/infra/manual/developers.html>`_.
The master repository is in `Git <https://git.openstack.org/cgit/openstack/python-senlinclient>`_.
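As a quick illustration of the two entry points described above (illustrative commands; the distribution name on PyPI is assumed to match the project name):

    pip install python-senlinclient   # assumed distribution name
    senlin help                       # the standalone command-line tool
    python -c "import senlinclient"   # the Python API module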
25  TODO
@@ -1,25 +0,0 @@
High Priority
=============

- Support action_create
- Support action_delete
- Support action_cancel
- Add support to HTTPS connection
  * This means a cert and key option using plain HTTP package, while
    it means using Transport when we switched to OpenStackSDK
- Add checking for token based authentication
- Add CLI argument checking, required vs optional

Middle Priority
===============

- Use code from https://review.openstack.org/#/c/95679/ to replace
  _append_global_identity_args(parser)
- Add unit tests
- Figure out how to use access_info and reauthenticate parameters for
  authentication.
- Support trust based authentication

Low Priority
============

2  doc/.gitignore  vendored
@@ -1,2 +0,0 @@
build/
source/ref/
177  doc/Makefile
@@ -1,177 +0,0 @@
|
||||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = build
|
||||
|
||||
# User-friendly check for sphinx-build
|
||||
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
|
||||
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
|
||||
endif
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
|
||||
|
||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " xml to make Docutils-native XML files"
|
||||
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
|
||||
clean:
|
||||
rm -rf $(BUILDDIR)/*
|
||||
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/python-senlinclient.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/python-senlinclient.qhc"
|
||||
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/python-senlinclient"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/python-senlinclient"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
latexpdfja:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through platex and dvipdfmx..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
||||
|
||||
xml:
|
||||
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||
@echo
|
||||
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||
|
||||
pseudoxml:
|
||||
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||
@echo
|
||||
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
File diff suppressed because it is too large.
@@ -1,350 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
# python-senlinclient documentation build configuration file, created by
|
||||
# sphinx-quickstart on Sat Mar 21 08:52:42 2015.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import pbr.version
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
# sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
# needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'openstackdocstheme',
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
# source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = 'python-senlinclient'
|
||||
copyright = '2015, OpenStack'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
|
||||
senlin_version = pbr.version.VersionInfo('python-senlinclient')
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = senlin_version.version_string_with_vcs()
|
||||
# The short X.Y version.
|
||||
version = senlin_version.canonical_version_string()
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
# language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
# today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
# today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = []
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
# default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
# add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
# add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
# show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
# modindex_common_prefix = []
|
||||
|
||||
primary_domain = 'py'
|
||||
nitpicky = False
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
# keep_warnings = False
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'openstackdocs'
|
||||
|
||||
# openstackdocstheme options
|
||||
repository_name = 'openstack/python-senlinclient'
|
||||
bug_project = 'python-senlinclient'
|
||||
bug_tag = ''
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
# html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
# html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
# html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
# html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
# html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
# html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
# html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
# html_extra_path = []
|
||||
|
||||
# Must set this variable to include year, month, day, hours, and minutes.
|
||||
html_last_updated_fmt = '%Y-%m-%d %H:%M'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
# html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
# html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
# html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
# html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
# html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
# html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
# html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
# html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
# html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
# html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
# html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'python-senlinclientdoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
# 'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
# 'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
# 'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
('index', 'python-senlinclient.tex',
|
||||
'python-senlinclient Documentation',
|
||||
'OpenStack Foundation', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
# latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
# latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
# latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
# latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
# latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
# latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('man/senlin', 'senlin',
|
||||
u'Command line reference for Senlin',
|
||||
[u'Senlin Developers'], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
# man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'Senlin', 'Senlin Documentation', u'Senlin Developers',
|
||||
'Senlin', 'One line description of project.', 'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
# texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
# texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
# texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
# texinfo_no_detailmenu = False
|
||||
|
||||
|
||||
# -- Options for Epub output ----------------------------------------------
|
||||
|
||||
# Bibliographic Dublin Core info.
|
||||
epub_title = 'python-senlinclient'
|
||||
epub_author = 'Senlin Developers'
|
||||
epub_publisher = 'OpenStack Foundation'
|
||||
epub_copyright = '2015, OpenStack'
|
||||
|
||||
# The basename for the epub file. It defaults to the project name.
|
||||
# epub_basename = 'python-senlinclient'
|
||||
|
||||
# The HTML theme for the epub output. Since the default themes are not
|
||||
# optimized for small screen space, using the same theme for HTML and epub
|
||||
# output is usually not wise.
|
||||
# This defaults to 'epub', a theme designed to save visual space.
|
||||
# epub_theme = 'epub'
|
||||
|
||||
# The language of the text. It defaults to the language option
|
||||
# or en if the language is not set.
|
||||
# epub_language = ''
|
||||
|
||||
# The scheme of the identifier. Typical schemes are ISBN or URL.
|
||||
# epub_scheme = ''
|
||||
|
||||
# The unique identifier of the text. This can be a ISBN number
|
||||
# or the project homepage.
|
||||
# epub_identifier = ''
|
||||
|
||||
# A unique identification for the text.
|
||||
# epub_uid = ''
|
||||
|
||||
# A tuple containing the cover image and cover page html template filenames.
|
||||
# epub_cover = ()
|
||||
|
||||
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
|
||||
# epub_guide = ()
|
||||
|
||||
# HTML files that should be inserted before the pages created by sphinx.
|
||||
# The format is a list of tuples containing the path and title.
|
||||
# epub_pre_files = []
|
||||
|
||||
# HTML files shat should be inserted after the pages created by sphinx.
|
||||
# The format is a list of tuples containing the path and title.
|
||||
# epub_post_files = []
|
||||
|
||||
# A list of files that should not be packed into the epub file.
|
||||
epub_exclude_files = ['search.html']
|
||||
|
||||
# The depth of the table of contents in toc.ncx.
|
||||
# epub_tocdepth = 3
|
||||
|
||||
# Allow duplicate toc entries.
|
||||
# epub_tocdup = True
|
||||
|
||||
# Choose between 'default' and 'includehidden'.
|
||||
# epub_tocscope = 'default'
|
||||
|
||||
# Fix unsupported image types using the PIL.
|
||||
# epub_fix_images = False
|
||||
|
||||
# Scale large images.
|
||||
# epub_max_image_width = 0
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
# epub_show_urls = 'inline'
|
||||
|
||||
# If false, no index is generated.
|
||||
# epub_use_index = True
|
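The conf.py above takes |version| and |release| from pbr instead of hard-coding them; a small sketch of inspecting those values (assumes python-senlinclient is installed in the current environment, and uses the same pbr calls as the conf.py itself):

    python -c "import pbr.version; v = pbr.version.VersionInfo('python-senlinclient'); print(v.canonical_version_string())"
    python -c "import pbr.version; v = pbr.version.VersionInfo('python-senlinclient'); print(v.version_string_with_vcs())"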
@@ -1,18 +0,0 @@
===============================================
Welcome to python-senlinclient's documentation!
===============================================

Contents:

.. toctree::
   :maxdepth: 2

   cli/index


Indices and tables
==================

* :ref:`genindex`
* :ref:`search`

@@ -1,82 +0,0 @@
:orphan:

======
senlin
======

.. program:: senlin

SYNOPSIS
========

`senlin` [options] <command> [command-options]

`senlin help`

`senlin help` <command>


DESCRIPTION
===========

`senlin` is a command line client for controlling OpenStack Senlin.

Before the `senlin` command is issued, ensure the environment contains
the necessary variables so that the CLI can pass user credentials to
the server.
See `Getting Credentials for a CLI` section of `OpenStack CLI Guide`
for more info.


OPTIONS
=======

To get a list of available commands and options run::

    senlin help

To get usage and options of a command run::

    senlin help <command>


EXAMPLES
========

Get information about profile-create command::

    senlin help profile-create

List available profiles::

    senlin profile-list

List available clusters::

    senlin cluster-list

Create a profile::

    senlin profile-create -s profile.spec myprofile

View profile information::

    senlin profile-show myprofile

Create a cluster::

    senlin cluster-create -p myprofile -n 2 mycluster

List events::

    senlin event-list

Delete a cluster::

    senlin cluster-delete mycluster

BUGS
====

Senlin client is hosted in Launchpad so you can view current bugs
at https://bugs.launchpad.net/python-senlinclient/.
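The DESCRIPTION section above leaves the credential variables unspecified; an illustrative environment (placeholder values, standard OpenStack variable names) would be:

    export OS_AUTH_URL=http://controller:5000/v3
    export OS_PROJECT_NAME=demo
    export OS_USERNAME=demo
    export OS_PASSWORD=secret
    export OS_USER_DOMAIN_NAME=Default
    export OS_PROJECT_DOMAIN_NAME=Default
    senlin cluster-list   # should now authenticate and list clusters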
@@ -1,4 +0,0 @@
---
other:
- The 'senlin' CLI will be removed in April 2017. This message is now
  explicitly printed when senlin CLI commands are invoked.
@@ -1,5 +0,0 @@
---
features:
- A new command 'senlin cluster-collect' and its corresponding OSC plugin
  command has been added. This new command can be used to aggregate a
  specific property across a cluster.
@@ -1,4 +0,0 @@
---
fixes:
- The cluster policy list command was broken by new SDK changes and then
  fixed. The 'enabled' field is now renamed to 'is_enabled'.
@@ -1,5 +0,0 @@
---
features:
- A new CLI command 'senlin cluster-run' and a new OSC plugin command
  'openstack cluster run' have been added. Use the 'help' command to find
  out how to use it.
@@ -1,5 +0,0 @@
---
upgrade:
- OSC commands for cluster scaling are changed from 'cluster scale in'
  and 'cluster scale out' to 'cluster shrink' and 'cluster expand'
  respectively.
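To make the rename in the note above concrete, the post-rename invocations look roughly like this (the cluster name is illustrative and any count flag is omitted):

    openstack cluster shrink mycluster   # previously: openstack cluster scale in mycluster
    openstack cluster expand mycluster   # previously: openstack cluster scale out mycluster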
@@ -1,5 +0,0 @@
---
features:
- The senlin CLI 'node-delete' and the OSC plugin command
  'cluster node delete' now outputs the action IDs when successful. Error
  messages are printed when appropriate.
@@ -1,4 +0,0 @@
---
features:
- The senlinclient now supports API micro-versioning. Current supported
  version is 'clustering 1.2'.
@@ -1,3 +0,0 @@
---
features:
- Added command for node-check and node-recover.
@@ -1,4 +0,0 @@
---
features:
- A policy-validate command has been added to senlin command line.
  OSC support is added as well.
@@ -1,4 +0,0 @@
---
features:
- A profile-validate command has been added to command line. It can be
  used for validating the spec of a profile without creating it.
@@ -1,3 +0,0 @@
---
features:
- The support to python 3.5 has been verified and gated.
@@ -1,5 +0,0 @@
---
other:
- The receiver creation command (both senlin CLI and OSC plugin command)
  now allow 'cluster' and 'action' to be left unspecified if the receiver
  type is not 'webhook'.
@@ -1,10 +0,0 @@
---
fixes:
- Fix resource list operations for openstackclient.
- Add filter "is_enabled" for policy binding list.
- Fix policy binding operations including attach, detach and update.
- Remove unsupported sort key "user" for event-list.
- Fix metadata purging.
- Add "cluster_id" colume for openstack cluster event list.
- Support "global_project" arguments for action-list.
- Fix resource update operations.
@@ -1,10 +0,0 @@
---
features:
- Support node replace operation.
- Enhance the parameter check for "path" in cluster collect operation.
- Help message for metadata clean operations.
fixes:
- Fix incorrect description of profile/policy validate operations.
- Fix project_id and user_id show bug for profile/policy validate and
  cluster policy show operations.
- Fix enabled option for senlin cluster-policy-detach command.
@@ -1,285 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import pbr.version
|
||||
|
||||
# Senlin Release Notes documentation build configuration file, created by
|
||||
# sphinx-quickstart on Tue Nov 3 17:40:50 2015.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
# sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
# needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'reno.sphinxext',
|
||||
'openstackdocstheme',
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
# source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'Senlin Client Release Notes'
|
||||
copyright = u'2015, Senlin Developers'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
|
||||
senlin_version = pbr.version.VersionInfo('python-senlinclient')
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = senlin_version.version_string_with_vcs()
|
||||
# The short X.Y version.
|
||||
version = senlin_version.canonical_version_string()
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
# language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
# today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
# today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = []
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
# default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
# add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
# add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
# show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
# modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
# keep_warnings = False
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'openstackdocs'
|
||||
|
||||
# openstackdocstheme options
|
||||
repository_name = 'openstack/python-senlinclient'
|
||||
bug_project = 'python-senlinclient'
|
||||
bug_tag = ''
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
# html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
# html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
# html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
# html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
# html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
# html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
# html_extra_path = []
|
||||
|
||||
# Must set this variable to include year, month, day, hours, and minutes.
|
||||
html_last_updated_fmt = '%Y-%m-%d %H:%M'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
# html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
# html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
# html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
# html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
# html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
# html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
# html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
# html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
# html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
# html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
# html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'SenlinClientReleaseNotesdoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
# 'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
# 'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
# 'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
('index', 'SenlinClientReleaseNotes.tex',
|
||||
u'Senlin Client Release Notes Documentation',
|
||||
u'Senlin Developers', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
# latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
# latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
# latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
# latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
# latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
# latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'senlinclientreleasenotes',
|
||||
u'Senlin Client Release Notes Documentation',
|
||||
[u'Senlin Developers'], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
# man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'SenlinClientReleaseNotes',
|
||||
u'Senlin Client Release Notes Documentation',
|
||||
u'Senlin Developers', 'SenlinClientReleaseNotes',
|
||||
'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
# texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
# texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
# texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
# texinfo_no_detailmenu = False
|
||||
|
||||
# -- Options for Internationalization output ------------------------------
|
||||
locale_dirs = ['locale/']
|
@@ -1,10 +0,0 @@
=============================
Senlin Client Release Notes
=============================

.. toctree::
   :maxdepth: 1

   unreleased
   ocata
   newton
@@ -1,40 +0,0 @@
|
||||
# zzxwill <zzxwill@gmail.com>, 2016. #zanata
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: Senlin Client Release Notes 0.5.1\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2016-07-04 03:44+0000\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"PO-Revision-Date: 2016-06-25 02:41+0000\n"
|
||||
"Last-Translator: zzxwill <zzxwill@gmail.com>\n"
|
||||
"Language-Team: Chinese (China)\n"
|
||||
"Language: zh-CN\n"
|
||||
"X-Generator: Zanata 3.7.3\n"
|
||||
"Plural-Forms: nplurals=1; plural=0\n"
|
||||
|
||||
msgid "0.5.0"
|
||||
msgstr "0.5.0"
|
||||
|
||||
msgid "Added command for node-check and node-recover."
|
||||
msgstr "已为node-check和node-recover添加了命令。"
|
||||
|
||||
msgid "Current Series Release Notes"
|
||||
msgstr "当前版本发布说明"
|
||||
|
||||
msgid "New Features"
|
||||
msgstr "新特性"
|
||||
|
||||
msgid ""
|
||||
"OSC commands for cluster scaling are changed from 'cluster scale in' and "
|
||||
"'cluster scale out' to 'cluster shrink' and 'cluster expand' respectively."
|
||||
msgstr ""
|
||||
"集群扩展的OSC命令分别从'cluster scale in'和'cluster scale out'改成了'cluster "
|
||||
"shrink'和'cluster expand'。"
|
||||
|
||||
msgid "Senlin Client Release Notes"
|
||||
msgstr "Senlin Client发布说明"
|
||||
|
||||
msgid "Upgrade Notes"
|
||||
msgstr "升级说明"
|
@@ -1,6 +0,0 @@
===================================
Newton Series Release Notes
===================================

.. release-notes::
   :branch: origin/stable/newton
@@ -1,6 +0,0 @@
===================================
Ocata Series Release Notes
===================================

.. release-notes::
   :branch: origin/stable/ocata
@@ -1,5 +0,0 @@
==============================
Current Series Release Notes
==============================

.. release-notes::
@@ -1,17 +0,0 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.

Babel!=2.4.0,>=2.3.4 # BSD
pbr!=2.1.0,>=2.0.0 # Apache-2.0
PrettyTable<0.8,>=0.7.1 # BSD
keystoneauth1!=3.0.0,>=2.21.0 # Apache-2.0
openstacksdk>=0.9.17 # Apache-2.0
osc-lib>=1.7.0 # Apache-2.0
oslo.i18n!=3.15.2,>=2.1.0 # Apache-2.0
oslo.serialization!=2.19.1,>=1.10.0 # Apache-2.0
oslo.utils>=3.20.0 # Apache-2.0
python-heatclient>=1.6.1 # Apache-2.0
PyYAML>=3.10.0 # MIT
requests>=2.14.2 # Apache-2.0
six>=1.9.0 # MIT
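A minimal way these requirements were typically consumed (sketch; run from a checkout of the commit prior to this retirement):

    pip install -r requirements.txt   # the runtime dependencies pinned above
    pip install .                     # installs senlinclient and the senlin CLI entry point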
123  run_tests.sh
@@ -1,123 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
BASE_DIR=`dirname $0`
|
||||
|
||||
function usage {
|
||||
echo "Usage: $0 [OPTION]..."
|
||||
echo "Run senlinclient test suite(s)"
|
||||
echo ""
|
||||
echo " -V, --virtual-env Use virtualenv. Install automatically if not present."
|
||||
echo " (Default is to run tests in local environment)"
|
||||
echo " -F, --force Force a clean re-build of the virtual environment. "
|
||||
echo " Useful when dependencies have been added."
|
||||
echo " -f, --func Functional tests have been removed."
|
||||
echo " -u, --unit Run unit tests (default when nothing specified)"
|
||||
echo " -p, --pep8 Run pep8 tests"
|
||||
echo " --all Run pep8 and unit tests"
|
||||
echo " -c, --coverage Generate coverage report"
|
||||
echo " -d, --debug Run tests with testtools instead of testr."
|
||||
echo " This allows you to use the debugger."
|
||||
echo " -h, --help Print this usage message"
|
||||
exit
|
||||
}
|
||||
|
||||
# must not assign -a as an option, needed for selecting custom attributes
|
||||
no_venv=1
|
||||
function process_option {
|
||||
case "$1" in
|
||||
-V|--virtual-env) no_venv=0;;
|
||||
-F|--force) force=1;;
|
||||
-f|--func) test_func=1;;
|
||||
-u|--unit) test_unit=1;;
|
||||
-p|--pep8) test_pep8=1;;
|
||||
--all) test_unit=1; test_pep8=1;;
|
||||
-c|--coverage) coverage=1;;
|
||||
-d|--debug) debug=1;;
|
||||
-h|--help) usage;;
|
||||
*) args="$args $1"; test_unit=1;;
|
||||
esac
|
||||
}
|
||||
|
||||
venv=.venv
|
||||
with_venv=tools/with_venv.sh
|
||||
wrapper=""
|
||||
debug=0
|
||||
|
||||
function run_tests {
|
||||
echo 'Running tests'
|
||||
|
||||
if [ $debug -eq 1 ]; then
|
||||
echo "Debugging..."
|
||||
if [ "$args" = "" ]; then
|
||||
# Default to running all tests if specific test is not
|
||||
# provided.
|
||||
testrargs="discover ./senlinclient/tests"
|
||||
fi
|
||||
${wrapper} python -m testtools.run $args $testrargs
|
||||
|
||||
# Short circuit because all of the testr and coverage stuff
|
||||
# below does not make sense when running testtools.run for
|
||||
# debugging purposes.
|
||||
return $?
|
||||
fi
|
||||
|
||||
# Just run the test suites in current environment
|
||||
if [ -n "$args" ] ; then
|
||||
args="-t $args"
|
||||
fi
|
||||
${wrapper} python setup.py testr --slowest $args
|
||||
}
|
||||
|
||||
function run_pep8 {
|
||||
echo "Running flake8..."
|
||||
bash -c "${wrapper} flake8"
|
||||
}
|
||||
|
||||
# run unit tests with pep8 when no arguments are specified
|
||||
# otherwise process CLI options
|
||||
if [[ $# == 0 ]]; then
|
||||
test_pep8=1
|
||||
test_unit=1
|
||||
else
|
||||
for arg in "$@"; do
|
||||
process_option $arg
|
||||
done
|
||||
fi
|
||||
|
||||
if [ "$no_venv" == 0 ]
|
||||
then
|
||||
# Remove the virtual environment if --force used
|
||||
if [ "$force" == 1 ]; then
|
||||
echo "Cleaning virtualenv..."
|
||||
rm -rf ${venv}
|
||||
fi
|
||||
if [ -e ${venv} ]; then
|
||||
wrapper="${with_venv}"
|
||||
else
|
||||
# Automatically install the virtualenv
|
||||
python tools/install_venv.py
|
||||
wrapper="${with_venv}"
|
||||
fi
|
||||
fi
|
||||
|
||||
result=0
|
||||
|
||||
# If functional or unit tests have been selected, run them
|
||||
if [ "$test_unit" == 1 ] || [ "$debug" == 1 ] ; then
|
||||
run_tests
|
||||
result=$?
|
||||
fi
|
||||
|
||||
# Run pep8 if it was selected
|
||||
if [ "$test_pep8" == 1 ]; then
|
||||
run_pep8
|
||||
fi
|
||||
|
||||
# Generate coverage report
|
||||
if [ "$coverage" == 1 ]; then
|
||||
echo "Generating coverage report in ./cover"
|
||||
${wrapper} python setup.py testr --coverage --slowest
|
||||
${wrapper} python -m coverage report --show-missing
|
||||
fi
|
||||
|
||||
exit $result
|
@ -1,16 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import pbr.version


__version__ = pbr.version.VersionInfo('python-senlinclient').version_string()
@ -1,180 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import argparse
|
||||
|
||||
from senlinclient.common.i18n import _
|
||||
from senlinclient.common import utils
|
||||
|
||||
|
||||
def add_global_identity_args(parser):
|
||||
parser.add_argument(
|
||||
'--os-auth-plugin', dest='auth_plugin', metavar='AUTH_PLUGIN',
|
||||
default=utils.env('OS_AUTH_PLUGIN', default=None),
|
||||
help=_('Authentication plugin, defaults to env[OS_AUTH_PLUGIN].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-auth-url', dest='auth_url', metavar='AUTH_URL',
|
||||
default=utils.env('OS_AUTH_URL'),
|
||||
help=_('Defaults to env[OS_AUTH_URL]'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-project-id', dest='project_id', metavar='PROJECT_ID',
|
||||
default=utils.env('OS_PROJECT_ID'),
|
||||
help=_('Defaults to env[OS_PROJECT_ID].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-project-name', dest='project_name', metavar='PROJECT_NAME',
|
||||
default=utils.env('OS_PROJECT_NAME'),
|
||||
help=_('Defaults to env[OS_PROJECT_NAME].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-tenant-id', dest='tenant_id', metavar='TENANT_ID',
|
||||
default=utils.env('OS_TENANT_ID'),
|
||||
help=_('Defaults to env[OS_TENANT_ID].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-tenant-name', dest='tenant_name', metavar='TENANT_NAME',
|
||||
default=utils.env('OS_TENANT_NAME'),
|
||||
help=_('Defaults to env[OS_TENANT_NAME].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-domain-id', dest='domain_id', metavar='DOMAIN_ID',
|
||||
default=utils.env('OS_DOMAIN_ID'),
|
||||
help=_('Domain ID for scope of authorization, defaults to '
|
||||
'env[OS_DOMAIN_ID].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-domain-name', dest='domain_name', metavar='DOMAIN_NAME',
|
||||
default=utils.env('OS_DOMAIN_NAME'),
|
||||
help=_('Domain name for scope of authorization, defaults to '
|
||||
'env[OS_DOMAIN_NAME].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-project-domain-id', dest='project_domain_id',
|
||||
metavar='PROJECT_DOMAIN_ID',
|
||||
default=utils.env('OS_PROJECT_DOMAIN_ID'),
|
||||
help=_('Project domain ID for scope of authorization, defaults to '
|
||||
'env[OS_PROJECT_DOMAIN_ID].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-project-domain-name', dest='project_domain_name',
|
||||
metavar='PROJECT_DOMAIN_NAME',
|
||||
default=utils.env('OS_PROJECT_DOMAIN_NAME'),
|
||||
help=_('Project domain name for scope of authorization, defaults to '
|
||||
'env[OS_PROJECT_DOMAIN_NAME].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-user-domain-id', dest='user_domain_id',
|
||||
metavar='USER_DOMAIN_ID',
|
||||
default=utils.env('OS_USER_DOMAIN_ID'),
|
||||
help=_('User domain ID for scope of authorization, defaults to '
|
||||
'env[OS_USER_DOMAIN_ID].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-user-domain-name', dest='user_domain_name',
|
||||
metavar='USER_DOMAIN_NAME',
|
||||
default=utils.env('OS_USER_DOMAIN_NAME'),
|
||||
help=_('User domain name for scope of authorization, defaults to '
|
||||
'env[OS_USER_DOMAIN_NAME].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-username', dest='username', metavar='USERNAME',
|
||||
default=utils.env('OS_USERNAME'),
|
||||
help=_('Defaults to env[OS_USERNAME].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-user-id', dest='user_id', metavar='USER_ID',
|
||||
default=utils.env('OS_USER_ID'),
|
||||
help=_('Defaults to env[OS_USER_ID].'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-password', dest='password', metavar='PASSWORD',
|
||||
default=utils.env('OS_PASSWORD'),
|
||||
help=_('Defaults to env[OS_PASSWORD]'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-trust-id', dest='trust_id', metavar='TRUST_ID',
|
||||
default=utils.env('OS_TRUST_ID'),
|
||||
help=_('Defaults to env[OS_TRUST_ID]'))
|
||||
|
||||
verify_group = parser.add_mutually_exclusive_group()
|
||||
|
||||
verify_group.add_argument(
|
||||
'--os-cacert', dest='verify', metavar='CA_BUNDLE_FILE',
|
||||
default=utils.env('OS_CACERT', default=True),
|
||||
help=_('Path of CA TLS certificate(s) used to verify the remote '
|
||||
'server\'s certificate. Without this option senlin looks '
|
||||
'for the default system CA certificates.'))
|
||||
|
||||
verify_group.add_argument(
|
||||
'--verify',
|
||||
action='store_true',
|
||||
help=_('Verify server certificate (default)'))
|
||||
|
||||
verify_group.add_argument(
|
||||
'--insecure', dest='verify', action='store_false',
|
||||
help=_('Explicitly allow senlinclient to perform "insecure SSL" '
|
||||
'(HTTPS) requests. The server\'s certificate will not be '
|
||||
'verified against any certificate authorities. This '
|
||||
'option should be used with caution.'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-token', dest='token', metavar='TOKEN',
|
||||
default=utils.env('OS_TOKEN', default=None),
|
||||
help=_('A string token to bootstrap the Keystone database, defaults '
|
||||
'to env[OS_TOKEN]'))
|
||||
|
||||
parser.add_argument(
|
||||
'--os-access-info', dest='access_info', metavar='ACCESS_INFO',
|
||||
default=utils.env('OS_ACCESS_INFO'),
|
||||
help=_('Access info, defaults to env[OS_ACCESS_INFO]'))
|
||||
|
||||
# parser.add_argument(
|
||||
# '--os-cert',
|
||||
# help=_('Path of certificate file to use in SSL connection. This '
|
||||
# 'file can optionally be prepended with the private key.'))
|
||||
#
|
||||
# parser.add_argument(
|
||||
# '--os-key',
|
||||
# help=_('Path of client key to use in SSL connection. This option is '
|
||||
# 'not necessary if your key is prepended to your cert file.'))
|
||||
|
||||
|
||||
def add_global_args(parser, version):
|
||||
# GLOBAL ARGUMENTS
|
||||
parser.add_argument(
|
||||
'-h', '--help', action='store_true',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument(
|
||||
'--version', action='version', version=version,
|
||||
help=_("Shows the client version and exits."))
|
||||
|
||||
parser.add_argument(
|
||||
'-d', '--debug', action='store_true',
|
||||
default=bool(utils.env('SENLINCLIENT_DEBUG')),
|
||||
help=_('Defaults to env[SENLINCLIENT_DEBUG].'))
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbose', action="store_true", default=False,
|
||||
help=_("Print more verbose output."))
|
||||
|
||||
parser.add_argument(
|
||||
'--api-timeout',
|
||||
help=_('Number of seconds to wait for an API response, '
|
||||
'defaults to system socket timeout'))
|
||||
|
||||
parser.add_argument(
|
||||
'--senlin-api-version',
|
||||
default=utils.env('SENLIN_API_VERSION', default='1'),
|
||||
help=_('Version number for Senlin API to use, defaults to "1".'))
|
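These identity and global options are plain argparse arguments, so the module is meant to be wired onto a parser before any parsing happens. A minimal sketch of that wiring, assuming the import path shown in this diff and an arbitrary version string:

import argparse

from senlinclient import cliargs

# add_help=False because add_global_args() registers '-h' itself.
parser = argparse.ArgumentParser(prog='senlin', add_help=False)
cliargs.add_global_args(parser, version='1.0')  # version string assumed
cliargs.add_global_identity_args(parser)

# Values fall back to the OS_* environment variables when flags are omitted.
options, remainder = parser.parse_known_args(['--os-username', 'demo'])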
@ -1,23 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from senlinclient.common import utils


def Client(api_ver, *args, **kwargs):
    """Import versioned client module.

    :param api_ver: API version required.
    """
    module = utils.import_versioned_module(api_ver, 'client')
    cls = getattr(module, 'Client')
    return cls(*args, **kwargs)
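The factory above only resolves senlinclient.v<api_ver>.client.Client and instantiates it, so callers pass whatever keyword arguments that versioned client expects. A minimal usage sketch, with the endpoint and credentials invented for illustration:

from senlinclient import client as senlin_client

# '1' resolves senlinclient.v1.client.Client via import_versioned_module().
sc = senlin_client.Client('1', user_agent='python-senlinclient',
                          auth_url='http://keystone.example.com/v3',
                          username='demo', password='secret',
                          project_name='demo')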
@ -1,309 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from keystoneauth1.exceptions import base as kae_base
|
||||
from keystoneauth1.exceptions import http as kae_http
|
||||
from openstack import exceptions as sdkexc
|
||||
from oslo_serialization import jsonutils
|
||||
from requests import exceptions as reqexc
|
||||
import six
|
||||
|
||||
from senlinclient.common.i18n import _
|
||||
|
||||
verbose = False
|
||||
|
||||
|
||||
class BaseException(Exception):
|
||||
"""An error occurred."""
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __str__(self):
|
||||
return self.message or self.__class__.__doc__
|
||||
|
||||
|
||||
class CommandError(BaseException):
|
||||
"""Invalid usage of CLI."""
|
||||
|
||||
|
||||
class FileFormatError(BaseException):
|
||||
"""Illegal file format detected."""
|
||||
|
||||
|
||||
class HTTPException(BaseException):
|
||||
"""Base exception for all HTTP-derived exceptions."""
|
||||
code = 'N/A'
|
||||
|
||||
def __init__(self, error=None):
|
||||
super(HTTPException, self).__init__(error)
|
||||
try:
|
||||
self.error = error
|
||||
if 'error' not in self.error:
|
||||
raise KeyError(_('Key "error" does not exist'))
|
||||
except KeyError:
|
||||
# If the key 'error' does not exist, self.message makes no
# sense; in that case, return the docstring of the current
# exception class instead.
|
||||
self.error = {'error': {'message': self.__class__.__doc__}}
|
||||
except Exception:
|
||||
self.error = {'error':
|
||||
{'message': self.message or self.__class__.__doc__}}
|
||||
|
||||
def __str__(self):
|
||||
message = self.error['error'].get('message', 'Internal Error')
|
||||
if verbose:
|
||||
traceback = self.error['error'].get('traceback', '')
|
||||
return (_('ERROR: %(message)s\n%(traceback)s') %
|
||||
{'message': message, 'traceback': traceback})
|
||||
else:
|
||||
code = self.error['error'].get('code', 'Unknown')
|
||||
return _('ERROR(%(code)s): %(message)s') % {'code': code,
|
||||
'message': message}
|
||||
|
||||
|
||||
class ClientError(HTTPException):
|
||||
pass
|
||||
|
||||
|
||||
class ServerError(HTTPException):
|
||||
pass
|
||||
|
||||
|
||||
class HTTPBadRequest(ClientError):
|
||||
# 400
|
||||
pass
|
||||
|
||||
|
||||
class HTTPUnauthorized(ClientError):
|
||||
# 401
|
||||
pass
|
||||
|
||||
|
||||
class HTTPForbidden(ClientError):
|
||||
# 403
|
||||
pass
|
||||
|
||||
|
||||
class HTTPNotFound(ClientError):
|
||||
# 404
|
||||
pass
|
||||
|
||||
|
||||
class HTTPMethodNotAllowed(ClientError):
|
||||
# 405
|
||||
pass
|
||||
|
||||
|
||||
class HTTPNotAcceptable(ClientError):
|
||||
# 406
|
||||
pass
|
||||
|
||||
|
||||
class HTTPProxyAuthenticationRequired(ClientError):
|
||||
# 407
|
||||
pass
|
||||
|
||||
|
||||
class HTTPRequestTimeout(ClientError):
|
||||
# 408
|
||||
pass
|
||||
|
||||
|
||||
class HTTPConflict(ClientError):
|
||||
# 409
|
||||
pass
|
||||
|
||||
|
||||
class HTTPGone(ClientError):
|
||||
# 410
|
||||
pass
|
||||
|
||||
|
||||
class HTTPLengthRequired(ClientError):
|
||||
# 411
|
||||
pass
|
||||
|
||||
|
||||
class HTTPPreconditionFailed(ClientError):
|
||||
# 412
|
||||
pass
|
||||
|
||||
|
||||
class HTTPRequestEntityTooLarge(ClientError):
|
||||
# 413
|
||||
pass
|
||||
|
||||
|
||||
class HTTPRequestURITooLong(ClientError):
|
||||
# 414
|
||||
pass
|
||||
|
||||
|
||||
class HTTPUnsupportedMediaType(ClientError):
|
||||
# 415
|
||||
pass
|
||||
|
||||
|
||||
class HTTPRequestRangeNotSatisfiable(ClientError):
|
||||
# 416
|
||||
pass
|
||||
|
||||
|
||||
class HTTPExpectationFailed(ClientError):
|
||||
# 417
|
||||
pass
|
||||
|
||||
|
||||
class HTTPInternalServerError(ServerError):
|
||||
# 500
|
||||
pass
|
||||
|
||||
|
||||
class HTTPNotImplemented(ServerError):
|
||||
# 501
|
||||
pass
|
||||
|
||||
|
||||
class HTTPBadGateway(ServerError):
|
||||
# 502
|
||||
pass
|
||||
|
||||
|
||||
class HTTPServiceUnavailable(ServerError):
|
||||
# 503
|
||||
pass
|
||||
|
||||
|
||||
class HTTPGatewayTimeout(ServerError):
|
||||
# 504
|
||||
pass
|
||||
|
||||
|
||||
class HTTPVersionNotSupported(ServerError):
|
||||
# 505
|
||||
pass
|
||||
|
||||
|
||||
class ConnectionRefused(HTTPException):
|
||||
# 111
|
||||
pass
|
||||
|
||||
|
||||
_EXCEPTION_MAP = {
|
||||
111: ConnectionRefused,
|
||||
400: HTTPBadRequest,
|
||||
401: HTTPUnauthorized,
|
||||
403: HTTPForbidden,
|
||||
404: HTTPNotFound,
|
||||
405: HTTPMethodNotAllowed,
|
||||
406: HTTPNotAcceptable,
|
||||
407: HTTPProxyAuthenticationRequired,
|
||||
408: HTTPRequestTimeout,
|
||||
409: HTTPConflict,
|
||||
410: HTTPGone,
|
||||
411: HTTPLengthRequired,
|
||||
412: HTTPPreconditionFailed,
|
||||
413: HTTPRequestEntityTooLarge,
|
||||
414: HTTPRequestURITooLong,
|
||||
415: HTTPUnsupportedMediaType,
|
||||
416: HTTPRequestRangeNotSatisfiable,
|
||||
417: HTTPExpectationFailed,
|
||||
500: HTTPInternalServerError,
|
||||
501: HTTPNotImplemented,
|
||||
502: HTTPBadGateway,
|
||||
503: HTTPServiceUnavailable,
|
||||
504: HTTPGatewayTimeout,
|
||||
505: HTTPVersionNotSupported,
|
||||
}
|
||||
|
||||
|
||||
def parse_exception(exc):
|
||||
"""Parse exception code and yield useful information.
|
||||
|
||||
:param exc: details of the exception.
|
||||
"""
|
||||
if isinstance(exc, sdkexc.HttpException):
|
||||
if exc.details is None:
|
||||
data = exc.response.json()
|
||||
code = data.get('code', None)
|
||||
message = data.get('message', None)
|
||||
error = data.get('error', None)
|
||||
if error:
|
||||
record = {
|
||||
'error': {
|
||||
'code': exc.http_status,
|
||||
'message': message or exc.message
|
||||
}
|
||||
}
|
||||
else:
|
||||
# dict.values() is not subscriptable on Python 3
info = list(data.values())[0]
|
||||
record = {
|
||||
'error': {
|
||||
'code': info.get('code', code),
|
||||
'message': info.get('message', message)
|
||||
}
|
||||
}
|
||||
else:
|
||||
try:
|
||||
record = jsonutils.loads(exc.details)
|
||||
except Exception:
|
||||
# If the exc.details is not in JSON format
|
||||
record = {
|
||||
'error': {
|
||||
'code': exc.http_status,
|
||||
'message': exc,
|
||||
}
|
||||
}
|
||||
elif isinstance(exc, reqexc.RequestException):
|
||||
# Exceptions that are not captured by SDK
|
||||
record = {
|
||||
'error': {
|
||||
'code': exc.message[1].errno,
|
||||
'message': exc.message[0],
|
||||
}
|
||||
}
|
||||
|
||||
elif isinstance(exc, six.string_types):
|
||||
record = jsonutils.loads(exc)
|
||||
# some exceptions from keystoneauth1 are not wrapped by the SDK
|
||||
elif isinstance(exc, kae_http.HttpError):
|
||||
record = {
|
||||
'error': {
|
||||
'code': exc.http_status,
|
||||
'message': exc.message
|
||||
}
|
||||
}
|
||||
elif isinstance(exc, kae_base.ClientException):
|
||||
record = {
|
||||
'error': {
|
||||
# other exceptions from keystoneauth1 are internal
# errors to senlin, so set the status code to 500
|
||||
'code': 500,
|
||||
'message': exc.message
|
||||
}
|
||||
}
|
||||
else:
|
||||
print(_('Unknown exception: %s') % exc)
|
||||
return
|
||||
|
||||
try:
|
||||
code = record['error']['code']
|
||||
except KeyError as err:
|
||||
print(_('Malformed exception record, missing field "%s"') % err)
|
||||
print(_('Original error record: %s') % record)
|
||||
return
|
||||
|
||||
if code in _EXCEPTION_MAP:
|
||||
inst = _EXCEPTION_MAP.get(code)
|
||||
raise inst(record)
|
||||
else:
|
||||
raise HTTPException(record)
|
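parse_exception() above normalizes several error shapes into one record and then maps the embedded code onto a typed exception. A small illustration of the string branch, with the error payload invented for the example:

from oslo_serialization import jsonutils

from senlinclient.common import exc

record = jsonutils.dumps({'error': {'code': 404, 'message': 'cluster not found'}})
try:
    exc.parse_exception(record)   # string input is JSON-decoded first
except exc.HTTPNotFound as ex:
    print(ex)                     # ERROR(404): cluster not found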
@ -1,51 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from osc_lib.command import command


class RawFormat(command.ShowOne):

    def produce_output(self, parsed_args, column_names, data):
        if data is None:
            return

        self.formatter.emit_one(column_names, data,
                                self.app.stdout, parsed_args)


class JsonFormat(RawFormat):

    @property
    def formatter_default(self):
        return 'json'


class YamlFormat(RawFormat):

    @property
    def formatter_default(self):
        return 'yaml'


class ShellFormat(RawFormat):

    @property
    def formatter_default(self):
        return 'shell'


class ValueFormat(RawFormat):

    @property
    def formatter_default(self):
        return 'value'
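Each of these classes only picks a default output formatter, so a concrete command just returns its columns and data from take_action(). A sketch of such a subclass, with the command name and payload invented (the unit tests later in this diff use the same pattern):

from senlinclient.common import format_utils


class ShowProfileYaml(format_utils.YamlFormat):
    """Hypothetical command that emits its result as YAML."""

    def take_action(self, parsed_args):
        columns = ['name', 'type']
        data = ['my_profile', 'os.nova.server']
        return columns, data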
@ -1,24 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
oslo_i18n integration module.
See https://docs.openstack.org/oslo.i18n/latest/user/usage.html
"""

import oslo_i18n


_translators = oslo_i18n.TranslatorFactory(domain='senlinclient')

# The primary translation function using the well-known name "_"
_ = _translators.primary
@ -1,37 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from openstack import connection
from openstack import exceptions
from openstack import profile

from senlinclient.common import exc


def create_connection(prof=None, user_agent=None, **kwargs):
    if not prof:
        prof = profile.Profile()
    interface = kwargs.pop('interface', None)
    region_name = kwargs.pop('region_name', None)
    if interface:
        prof.set_interface('clustering', interface)
    if region_name:
        prof.set_region('clustering', region_name)

    prof.set_api_version('clustering', '1.5')
    try:
        conn = connection.Connection(profile=prof, user_agent=user_agent,
                                     **kwargs)
    except exceptions.HttpException as ex:
        exc.parse_exception(ex.details)

    return conn
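create_connection() pops only 'interface' and 'region_name' before handing everything else to the SDK's Connection, and it pins the clustering API version to 1.5. A usage sketch, assuming the module lives under senlinclient.common as the imports above suggest, with the endpoint and credentials invented:

from senlinclient.common import sdk

conn = sdk.create_connection(
    interface='public',
    region_name='RegionOne',
    auth_url='http://keystone.example.com/v3',
    username='demo', password='secret', project_name='demo')
senlin = conn.cluster   # the clustering proxy, as plugin.make_client() returns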
@ -1,282 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from heatclient.common import template_utils
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import encodeutils
|
||||
from oslo_utils import importutils
|
||||
import prettytable
|
||||
import six
|
||||
import yaml
|
||||
|
||||
from senlinclient.common import exc
|
||||
from senlinclient.common.i18n import _
|
||||
|
||||
|
||||
supported_formats = {
|
||||
"json": lambda x: jsonutils.dumps(x, indent=2),
|
||||
"yaml": lambda x: yaml.safe_dump(x, default_flow_style=False)
|
||||
}
|
||||
|
||||
|
||||
def arg(*args, **kwargs):
|
||||
"""Decorator for CLI args."""
|
||||
|
||||
def _decorator(func):
|
||||
if not hasattr(func, 'arguments'):
|
||||
func.arguments = []
|
||||
|
||||
if (args, kwargs) not in func.arguments:
|
||||
func.arguments.insert(0, (args, kwargs))
|
||||
|
||||
return func
|
||||
|
||||
return _decorator
|
||||
|
||||
|
||||
def env(*args, **kwargs):
|
||||
"""Returns the first environment variable set.
|
||||
|
||||
If all are empty, defaults to '' or keyword arg `default`.
|
||||
"""
|
||||
for arg in args:
|
||||
value = os.environ.get(arg)
|
||||
if value:
|
||||
return value
|
||||
return kwargs.get('default', '')
|
||||
|
||||
|
||||
def import_versioned_module(version, submodule=None):
|
||||
module = 'senlinclient.v%s' % version
|
||||
if submodule:
|
||||
module = '.'.join((module, submodule))
|
||||
return importutils.import_module(module)
|
||||
|
||||
|
||||
def format_nested_dict(d, fields, column_names):
|
||||
if d is None:
|
||||
return ''
|
||||
pt = prettytable.PrettyTable(caching=False, print_empty=False,
|
||||
header=True, field_names=column_names)
|
||||
for n in column_names:
|
||||
pt.align[n] = 'l'
|
||||
|
||||
keys = sorted(d.keys())
|
||||
for field in keys:
|
||||
value = d[field]
|
||||
if not isinstance(value, six.string_types):
|
||||
value = jsonutils.dumps(value, indent=2, ensure_ascii=False)
|
||||
if value is None:
|
||||
value = '-'
|
||||
pt.add_row([field, value.strip('"')])
|
||||
|
||||
return pt.get_string()
|
||||
|
||||
|
||||
def nested_dict_formatter(d, column_names):
|
||||
return lambda o: format_nested_dict(o, d, column_names)
|
||||
|
||||
|
||||
def json_formatter(js):
|
||||
return jsonutils.dumps(js, indent=2, ensure_ascii=False)
|
||||
|
||||
|
||||
def list_formatter(record):
|
||||
return '\n'.join(record or [])
|
||||
|
||||
|
||||
def _print_list(objs, fields, formatters=None, sortby_index=0,
|
||||
mixed_case_fields=None, field_labels=None):
|
||||
"""Print a list of objects as a table, one row per object.
|
||||
|
||||
:param objs: iterable of :class:`Resource`
|
||||
:param fields: attributes that correspond to columns, in order
|
||||
:param formatters: `dict` of callables for field formatting
|
||||
:param sortby_index: index of the field for sorting table rows
|
||||
:param mixed_case_fields: fields corresponding to object attributes that
|
||||
have mixed case names (e.g., 'serverId')
|
||||
:param field_labels: Labels to use in the heading of the table, default to
|
||||
fields.
|
||||
"""
|
||||
formatters = formatters or {}
|
||||
mixed_case_fields = mixed_case_fields or []
|
||||
field_labels = field_labels or fields
|
||||
if len(field_labels) != len(fields):
|
||||
raise ValueError(_("Field labels list %(labels)s has different number "
|
||||
"of elements than fields list %(fields)s"),
|
||||
{'labels': field_labels, 'fields': fields})
|
||||
|
||||
if sortby_index is None:
|
||||
kwargs = {}
|
||||
else:
|
||||
kwargs = {'sortby': field_labels[sortby_index]}
|
||||
pt = prettytable.PrettyTable(field_labels)
|
||||
pt.align = 'l'
|
||||
|
||||
for o in objs:
|
||||
row = []
|
||||
for field in fields:
|
||||
if field in formatters:
|
||||
data = formatters[field](o)
|
||||
else:
|
||||
if field in mixed_case_fields:
|
||||
field_name = field.replace(' ', '_')
|
||||
else:
|
||||
field_name = field.lower().replace(' ', '_')
|
||||
data = getattr(o, field_name, '')
|
||||
if data is None:
|
||||
data = '-'
|
||||
row.append(data)
|
||||
pt.add_row(row)
|
||||
|
||||
if six.PY3:
|
||||
return encodeutils.safe_encode(pt.get_string(**kwargs)).decode()
|
||||
else:
|
||||
return encodeutils.safe_encode(pt.get_string(**kwargs))
|
||||
|
||||
|
||||
def print_list(objs, fields, formatters=None, sortby_index=0,
|
||||
mixed_case_fields=None, field_labels=None):
|
||||
# This wrapper is needed because sdk may yield a generator that will
|
||||
# escape the exception catching previously
|
||||
if not objs:
|
||||
objs = []
|
||||
|
||||
try:
|
||||
res = _print_list(objs, fields, formatters=formatters,
|
||||
sortby_index=sortby_index,
|
||||
mixed_case_fields=mixed_case_fields,
|
||||
field_labels=field_labels)
|
||||
print(res)
|
||||
except Exception as ex:
|
||||
exc.parse_exception(ex)
|
||||
|
||||
|
||||
def print_dict(d, formatters=None):
|
||||
formatters = formatters or {}
|
||||
pt = prettytable.PrettyTable(['Property', 'Value'],
|
||||
caching=False, print_empty=False)
|
||||
pt.align = 'l'
|
||||
|
||||
for field in d.keys():
|
||||
if field in formatters:
|
||||
data = formatters[field](d[field])
|
||||
else:
|
||||
data = d[field]
|
||||
if data is None:
|
||||
data = '-'
|
||||
pt.add_row([field, data])
|
||||
|
||||
content = pt.get_string(sortby='Property')
|
||||
if six.PY3:
|
||||
print(encodeutils.safe_encode(content).decode())
|
||||
else:
|
||||
print(encodeutils.safe_encode(content))
|
||||
|
||||
|
||||
def print_action_result(rid, res):
|
||||
if res[0] == "OK":
|
||||
output = _("accepted by action %s") % res[1]
|
||||
else:
|
||||
output = _("failed due to '%s'") % res[1]
|
||||
print(_(" %(cid)s: %(output)s") % {"cid": rid, "output": output})
|
||||
|
||||
|
||||
def format_parameters(params, parse_semicolon=True):
|
||||
"""Reformat parameters into dict of format expected by the API."""
|
||||
if not params or params == ['{}']:
|
||||
return {}
|
||||
|
||||
if parse_semicolon:
|
||||
# expect multiple invocations of --parameters but fall back to ';'
|
||||
# delimited if only one --parameters is specified
|
||||
if len(params) == 1:
|
||||
params = params[0].split(';')
|
||||
|
||||
parameters = {}
|
||||
for p in params:
|
||||
try:
|
||||
(n, v) = p.split(('='), 1)
|
||||
except ValueError:
|
||||
msg = _('Malformed parameter(%s). Use the key=value format.') % p
|
||||
raise exc.CommandError(msg)
|
||||
|
||||
if n not in parameters:
|
||||
parameters[n] = v
|
||||
else:
|
||||
if not isinstance(parameters[n], list):
|
||||
parameters[n] = [parameters[n]]
|
||||
parameters[n].append(v)
|
||||
|
||||
return parameters
|
||||
|
||||
|
||||
def get_spec_content(filename):
|
||||
with open(filename, 'r') as f:
|
||||
try:
|
||||
data = yaml.safe_load(f)
|
||||
except Exception as ex:
|
||||
raise exc.CommandError(_('The specified file is not a valid '
|
||||
'YAML file: %s') % six.text_type(ex))
|
||||
return data
|
||||
|
||||
|
||||
def process_stack_spec(spec):
|
||||
# Heat stacks require client-side processing of template and
# environment file contents
|
||||
try:
|
||||
tmplfile = spec.get('template', None)
|
||||
except AttributeError as ex:
|
||||
raise exc.FileFormatError(_('The specified file is not a valid '
|
||||
'YAML file: %s') % six.text_type(ex))
|
||||
if not tmplfile:
|
||||
raise exc.FileFormatError(_('No template found in the given '
|
||||
'spec file'))
|
||||
|
||||
tpl_files, template = template_utils.get_template_contents(
|
||||
template_file=tmplfile)
|
||||
|
||||
env_files, env = template_utils.process_multiple_environments_and_files(
|
||||
env_paths=spec.get('environment', None))
|
||||
|
||||
new_spec = {
|
||||
# TODO(Qiming): add context support
|
||||
'disable_rollback': spec.get('disable_rollback', True),
|
||||
'context': spec.get('context', {}),
|
||||
'parameters': spec.get('parameters', {}),
|
||||
'timeout': spec.get('timeout', 60),
|
||||
'template': template,
|
||||
'files': dict(list(tpl_files.items()) + list(env_files.items())),
|
||||
'environment': env
|
||||
}
|
||||
|
||||
return new_spec
|
||||
|
||||
|
||||
def format_output(output, format='yaml'):
|
||||
fmt = format.lower()
|
||||
try:
|
||||
return supported_formats[fmt](output)
|
||||
except KeyError:
|
||||
raise exc.HTTPUnsupported(_('The format(%s) is unsupported.')
|
||||
% fmt)
|
||||
|
||||
|
||||
def exit(msg=''):
|
||||
if msg:
|
||||
print(msg, file=sys.stderr)
|
||||
sys.exit(1)
|
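Of the helpers in this module, format_parameters() has the least obvious behavior (semicolon splitting and repeated keys); a quick illustration of what the code shown accepts:

from senlinclient.common import utils

# A single --parameters value may pack several pairs, ';'-separated.
utils.format_parameters(['name=web;size=3'])
# -> {'name': 'web', 'size': '3'}

# Repeating a key collects its values into a list.
utils.format_parameters(['tag=a', 'tag=b'], parse_semicolon=False)
# -> {'tag': ['a', 'b']}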
@ -1,21 +0,0 @@
# Zheng Xi Zhou <zzxwill@gmail.com>, 2016. #zanata
msgid ""
msgstr ""
"Project-Id-Version: python-senlinclient 0.4.2.dev33\n"
"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n"
"POT-Creation-Date: 2016-06-06 07:37+0000\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"PO-Revision-Date: 2016-06-16 05:48+0000\n"
"Last-Translator: Zheng Xi Zhou <zzxwill@gmail.com>\n"
"Language-Team: Chinese (China)\n"
"Language: zh-CN\n"
"X-Generator: Zanata 3.7.3\n"
"Plural-Forms: nplurals=1; plural=0\n"

msgid "Ctrl-c detected."
msgstr "检测到Ctrl-c。"

msgid "Ctrl-d detected"
msgstr "检测到Ctrl-d。"
@ -1,19 +0,0 @@
# Zheng Xi Zhou <zzxwill@gmail.com>, 2016. #zanata
msgid ""
msgstr ""
"Project-Id-Version: python-senlinclient 0.4.2.dev33\n"
"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n"
"POT-Creation-Date: 2016-06-06 07:37+0000\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"PO-Revision-Date: 2016-06-16 05:49+0000\n"
"Last-Translator: Zheng Xi Zhou <zzxwill@gmail.com>\n"
"Language-Team: Chinese (China)\n"
"Language: zh-CN\n"
"X-Generator: Zanata 3.7.3\n"
"Plural-Forms: nplurals=1; plural=0\n"

#, python-format
msgid "\"%(old)s\" is deprecated, please use \"%(new)s\" instead."
msgstr "\"%(old)s\"已弃用,请使用\"%(new)s\"。"
File diff suppressed because it is too large
@ -1,53 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""OpenStackClient plugin for Clustering service."""

import logging

from openstack import connection
from openstack import profile
from osc_lib import utils

LOG = logging.getLogger(__name__)

DEFAULT_CLUSTERING_API_VERSION = '1'
API_VERSION_OPTION = 'os_clustering_api_version'
API_NAME = 'clustering'
CURRENT_API_VERSION = '1.5'


def make_client(instance):
    """Returns a clustering proxy"""
    prof = profile.Profile()
    prof.set_api_version(API_NAME, CURRENT_API_VERSION)

    conn = connection.Connection(profile=prof,
                                 authenticator=instance.session.auth)
    LOG.debug('Connection: %s', conn)
    LOG.debug('Clustering client initialized using OpenStackSDK: %s',
              conn.cluster)
    return conn.cluster


def build_option_parser(parser):
    """Hook to add global options"""
    parser.add_argument(
        '--os-clustering-api-version',
        metavar='<clustering-api-version>',
        default=utils.env(
            'OS_CLUSTERING_API_VERSION',
            default=DEFAULT_CLUSTERING_API_VERSION),
        help='Clustering API version, default=' +
             DEFAULT_CLUSTERING_API_VERSION +
             ' (Env: OS_CLUSTERING_API_VERSION)')
    return parser
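build_option_parser() is the hook OSC invokes when loading this plugin; it can also be exercised on a bare parser, as in the following sketch (the parser here is created only for illustration, and the module path is assumed from this diff):

import argparse

from senlinclient import plugin

parser = argparse.ArgumentParser(prog='openstack')
plugin.build_option_parser(parser)

# Defaults to '1' or the OS_CLUSTERING_API_VERSION environment variable.
args = parser.parse_args(['--os-clustering-api-version', '1.2'])
print(args.os_clustering_api_version)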
@ -1,312 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Command-line interface to the Senlin clustering API.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from oslo_utils import encodeutils
|
||||
from oslo_utils import importutils
|
||||
import six
|
||||
|
||||
import senlinclient
|
||||
from senlinclient import cliargs
|
||||
from senlinclient import client as senlin_client
|
||||
from senlinclient.common import exc
|
||||
from senlinclient.common.i18n import _
|
||||
from senlinclient.common import utils
|
||||
|
||||
osprofiler_profiler = importutils.try_import("osprofiler.profiler")
|
||||
USER_AGENT = 'python-senlinclient'
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HelpFormatter(argparse.HelpFormatter):
|
||||
def start_section(self, heading):
|
||||
# Title-case the headings
|
||||
heading = '%s%s' % (heading[0].upper(), heading[1:])
|
||||
super(HelpFormatter, self).start_section(heading)
|
||||
|
||||
|
||||
class SenlinShell(object):
|
||||
def _setup_logging(self, debug):
|
||||
log_lvl = logging.DEBUG if debug else logging.WARNING
|
||||
logging.basicConfig(format="%(levelname)s (%(module)s) %(message)s",
|
||||
level=log_lvl)
|
||||
logging.getLogger('iso8601').setLevel(logging.WARNING)
|
||||
logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
|
||||
|
||||
def _setup_verbose(self, verbose):
|
||||
if verbose:
|
||||
exc.verbose = 1
|
||||
|
||||
def _find_actions(self, subparsers, actions_module):
|
||||
for attr in (a for a in dir(actions_module) if a.startswith('do_')):
|
||||
command = attr[3:].replace('_', '-')
|
||||
callback = getattr(actions_module, attr)
|
||||
|
||||
# get callback documentation string
|
||||
desc = callback.__doc__ or ''
|
||||
help = desc.strip().split('\n')[0]
|
||||
arguments = getattr(callback, 'arguments', [])
|
||||
|
||||
subparser = subparsers.add_parser(command,
|
||||
help=help,
|
||||
description=desc,
|
||||
add_help=False,
|
||||
formatter_class=HelpFormatter)
|
||||
|
||||
subparser.add_argument('-h', '--help',
|
||||
action='help',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
for (args, kwargs) in arguments:
|
||||
subparser.add_argument(*args, **kwargs)
|
||||
subparser.set_defaults(func=callback)
|
||||
|
||||
self.subcommands[command] = subparser
|
||||
|
||||
def do_bash_completion(self, args):
|
||||
"""Prints all of the commands and options to stdout.
|
||||
|
||||
The senlin.bash_completion script doesn't have to hard code them.
|
||||
"""
|
||||
commands = set()
|
||||
options = set()
|
||||
for sc_str, sc in self.subcommands.items():
|
||||
if sc_str == 'bash-completion':
|
||||
continue
|
||||
|
||||
commands.add(sc_str)
|
||||
for option in list(sc._optionals._option_string_actions):
|
||||
options.add(option)
|
||||
|
||||
print(' '.join(commands | options))
|
||||
|
||||
def add_profiler_args(self, parser):
|
||||
if osprofiler_profiler:
|
||||
parser.add_argument(
|
||||
'--os-profile',
|
||||
metavar='HMAC_KEY',
|
||||
default=utils.env('OS_PROFILE'),
|
||||
help=_('HMAC key to use for encrypting context data for '
|
||||
'performance profiling of operation. This key should '
|
||||
'be the value of HMAC key configured in '
|
||||
'senlin configuration (/etc/senlin/senlin.conf). '
|
||||
'Without the key, profiling will not be triggered '
|
||||
'even if osprofiler is enabled on server side.'))
|
||||
|
||||
def get_subcommand_parser(self, base_parser, version):
|
||||
parser = base_parser
|
||||
|
||||
self.subcommands = {}
|
||||
subparsers = parser.add_subparsers(metavar='<subcommand>')
|
||||
submodule = utils.import_versioned_module(version, 'shell')
|
||||
self._find_actions(subparsers, submodule)
|
||||
self._find_actions(subparsers, self)
|
||||
|
||||
return parser
|
||||
|
||||
@utils.arg('command', metavar='<subcommand>', nargs='?',
|
||||
help=_('Display help for <subcommand>.'))
|
||||
def do_help(self, args):
|
||||
"""Display help about this program or one of its subcommands."""
|
||||
if getattr(args, 'command', None):
|
||||
if args.command in self.subcommands:
|
||||
self.subcommands[args.command].print_help()
|
||||
else:
|
||||
raise exc.CommandError("'%s' is not a valid subcommand" %
|
||||
args.command)
|
||||
else:
|
||||
self.parser.print_help()
|
||||
|
||||
def _check_identity_arguments(self, args):
|
||||
# TODO(Qiming): validate the token authentication path and the trust
|
||||
# authentication path
|
||||
|
||||
if not args.auth_url:
|
||||
msg = _('You must provide an auth url via --os-auth-url (or '
|
||||
' env[OS_AUTH_URL])')
|
||||
raise exc.CommandError(msg)
|
||||
|
||||
# username or user_id or token must be specified
|
||||
if not (args.username or args.user_id or args.token):
|
||||
msg = _('You must provide a user name, a user_id or a '
|
||||
'token for authentication')
|
||||
raise exc.CommandError(msg)
|
||||
|
||||
# if both username and user_id are specified, user_id takes precedence
|
||||
if (args.username and args.user_id):
|
||||
msg = _('Both user name and user ID are specified, Senlin will '
|
||||
'use user ID for authentication')
|
||||
print(_('WARNING: %s') % msg)
|
||||
|
||||
if 'v3' in args.auth_url:
|
||||
if (args.username and not args.user_id):
|
||||
if not (args.user_domain_id or args.user_domain_name):
|
||||
msg = _('Either user domain ID (--user-domain-id / '
|
||||
'env[OS_USER_DOMAIN_ID]) or user domain name '
|
||||
'(--user-domain-name / env[OS_USER_DOMAIN_NAME]) '
|
||||
'must be specified, because user name may not be '
|
||||
'unique.')
|
||||
raise exc.CommandError(msg)
|
||||
|
||||
# password is needed if username or user_id is present
|
||||
if (args.username or args.user_id) and not (args.password):
|
||||
msg = _('You must provide a password for user %s') % (
|
||||
args.username or args.user_id)
|
||||
raise exc.CommandError(msg)
|
||||
|
||||
# project name or ID is needed, or else sdk may find the wrong project
|
||||
if (not (args.project_id or args.project_name or args.tenant_id or
|
||||
args.tenant_name)):
|
||||
if not (args.user_id):
|
||||
msg = _('Either project/tenant ID or project/tenant name '
|
||||
'must be specified, or else Senlin cannot know '
|
||||
'which project to use.')
|
||||
raise exc.CommandError(msg)
|
||||
else:
|
||||
msg = _('Neither project ID nor project name is specified. '
|
||||
'Senlin will use user\'s default project which may '
|
||||
'result in authentication error.')
|
||||
print(_('WARNING: %s') % msg)
|
||||
|
||||
# both project name and ID are specified, ID takes precedence
|
||||
if ((args.project_id or args.tenant_id) and
|
||||
(args.project_name or args.tenant_name)):
|
||||
msg = _('Both project/tenant name and project/tenant ID are '
|
||||
'specified, Senlin will use project ID for '
|
||||
'authentication')
|
||||
print(_('WARNING: %s') % msg)
|
||||
|
||||
# project name may not be unique
|
||||
if 'v3' in args.auth_url:
|
||||
if (not (args.project_id or args.tenant_id) and
|
||||
(args.project_name or args.tenant_name) and
|
||||
not (args.project_domain_id or args.project_domain_name)):
|
||||
msg = _('Either project domain ID (--project-domain-id / '
|
||||
'env[OS_PROJECT_DOMAIN_ID]) or project domain name '
|
||||
'(--project-domain-name / '
|
||||
'env[OS_PROJECT_DOMAIN_NAME]) must be specified, '
|
||||
'because project/tenant name may not be unique.')
|
||||
raise exc.CommandError(msg)
|
||||
|
||||
def _setup_senlin_client(self, api_ver, args):
|
||||
"""Create senlin client using given args."""
|
||||
kwargs = {
|
||||
'auth_plugin': args.auth_plugin or 'password',
|
||||
'auth_url': args.auth_url,
|
||||
'project_name': args.project_name or args.tenant_name,
|
||||
'project_id': args.project_id or args.tenant_id,
|
||||
'domain_name': args.domain_name,
|
||||
'domain_id': args.domain_id,
|
||||
'project_domain_name': args.project_domain_name,
|
||||
'project_domain_id': args.project_domain_id,
|
||||
'user_domain_name': args.user_domain_name,
|
||||
'user_domain_id': args.user_domain_id,
|
||||
'username': args.username,
|
||||
'user_id': args.user_id,
|
||||
'password': args.password,
|
||||
'verify': args.verify,
|
||||
'token': args.token,
|
||||
'trust_id': args.trust_id,
|
||||
}
|
||||
|
||||
return senlin_client.Client('1', user_agent=USER_AGENT, **kwargs)
|
||||
|
||||
def main(self, argv):
|
||||
# Parse args once to find version
|
||||
parser = argparse.ArgumentParser(
|
||||
prog='senlin',
|
||||
description=__doc__.strip(),
|
||||
epilog=_('Type "senlin help <COMMAND>" for help on a specific '
|
||||
'command.'),
|
||||
add_help=False,
|
||||
formatter_class=HelpFormatter,
|
||||
)
|
||||
|
||||
cliargs.add_global_args(parser, version=senlinclient.__version__)
|
||||
cliargs.add_global_identity_args(parser)
|
||||
self.add_profiler_args(parser)
|
||||
base_parser = parser
|
||||
|
||||
(options, args) = base_parser.parse_known_args(argv)
|
||||
|
||||
self._setup_logging(options.debug)
|
||||
self._setup_verbose(options.verbose)
|
||||
|
||||
# build available subcommands based on version
|
||||
api_ver = options.senlin_api_version
|
||||
LOG.info(api_ver)
|
||||
subcommand_parser = self.get_subcommand_parser(base_parser, api_ver)
|
||||
self.parser = subcommand_parser
|
||||
|
||||
# Handle top-level --help/-h before attempting to parse
|
||||
# a command off the command line
|
||||
if not args and options.help or not argv:
|
||||
self.do_help(options)
|
||||
return 0
|
||||
|
||||
# Parse args again and call whatever callback was selected
|
||||
args = subcommand_parser.parse_args(argv)
|
||||
|
||||
# Short-circuit and deal with help command right away.
|
||||
if args.func == self.do_help:
|
||||
self.do_help(args)
|
||||
return 0
|
||||
elif args.func == self.do_bash_completion:
|
||||
self.do_bash_completion(args)
|
||||
return 0
|
||||
|
||||
# Check if identity information is sufficient
|
||||
self._check_identity_arguments(args)
|
||||
|
||||
# Setup Senlin client connection
|
||||
sc = self._setup_senlin_client(api_ver, args)
|
||||
|
||||
os_profile = osprofiler_profiler and options.os_profile
|
||||
if os_profile:
|
||||
osprofiler_profiler.init(options.os_profile)
|
||||
|
||||
args.func(sc.service, args)
|
||||
|
||||
if os_profile:
|
||||
trace_id = osprofiler_profiler.get().get_base_id()
|
||||
print(_("Trace ID: %s") % trace_id)
|
||||
print(_("To display trace use next command:\n"
|
||||
"osprofiler trace show --html %s ") % trace_id)
|
||||
|
||||
|
||||
def main(args=None):
|
||||
try:
|
||||
if args is None:
|
||||
args = sys.argv[1:]
|
||||
|
||||
SenlinShell().main(args)
|
||||
except KeyboardInterrupt:
|
||||
print(_("... terminating senlin client"), file=sys.stderr)
|
||||
return 130
|
||||
except Exception as e:
|
||||
if '--debug' in args or '-d' in args:
|
||||
raise
|
||||
else:
|
||||
print(encodeutils.safe_encode(six.text_type(e)), file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
@ -1,32 +0,0 @@
|
||||
#!/bin/bash -xe
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# This script is executed inside post_test_hook function in devstack gate.
|
||||
|
||||
export SENLINCLIENT_DIR="$BASE/new/python-senlinclient"
|
||||
|
||||
cd $BASE/new/devstack
|
||||
source openrc admin admin
|
||||
|
||||
# Run tests
|
||||
echo "Running senlinclient functional test."
|
||||
set +e
|
||||
|
||||
# TODO(Anyone): Running senlinclient functional test by running
|
||||
# "tox -efunctional"
|
||||
|
||||
set -e
|
||||
echo "Running senlinclient functional test succeeded."
|
||||
|
||||
exit 0
|
@ -1,34 +0,0 @@
|
||||
#!/bin/bash -xe
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# This script is executed inside pre_test_hook function in devstack gate.
|
||||
|
||||
export localconf=$BASE/new/devstack/local.conf
|
||||
export SENLIN_CONF=/etc/senlin/senlin.conf
|
||||
export SENLIN_BACKEND=${SENLIN_BACKEND:-'openstack_test'}
|
||||
|
||||
_LOG_CFG='default_log_levels ='
|
||||
_LOG_CFG+='amqp=WARN,amqplib=WARN,sqlalchemy=WARN,oslo_messaging=WARN'
|
||||
_LOG_CFG+=',iso8601=WARN,requests.packages.urllib3.connectionpool=WARN'
|
||||
_LOG_CFG+=',urllib3.connectionpool=WARN'
|
||||
_LOG_CFG+=',requests.packages.urllib3.util.retry=WARN,urllib3.util.retry=WARN'
|
||||
_LOG_CFG+=',keystonemiddleware=WARN'
|
||||
_LOG_CFG+=',routes.middleware=WARN'
|
||||
_LOG_CFG+=',stevedore=WARN'
|
||||
_LOG_CFG+=',oslo_messaging._drivers.amqp=WARN'
|
||||
_LOG_CFG+=',oslo_messaging._drivers.amqpdriver=WARN'
|
||||
|
||||
echo -e '[[post-config|$SENLIN_CONF]]\n[DEFAULT]\n' >> $localconf
|
||||
echo -e "cloud_backend=$SENLIN_BACKEND\n" >> $localconf
|
||||
echo -e $_LOG_CFG >> $localconf
|
@ -1,8 +0,0 @@
type: senlin.policy.deletion
version: 1.0
description: A policy for choosing victim node(s) from a cluster for deletion.
properties:
  criteria: OLDEST_FIRST
  destroy_after_deletion: True
  grace_period: 60
  reduce_desired_capacity: False
@ -1,6 +0,0 @@
type: os.nova.server
version: 1.0
properties:
  name: cirros_server
  flavor: 1
  image: cirros-0.3.4-x86_64-uec
@ -1,25 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from senlinclient.common import utils


def do_command_foo(sc, args):
    """Pydoc for command foo."""
    return


@utils.arg('-F', '--flag', metavar='<FLAG>', help='Flag desc.')
@utils.arg('arg1', metavar='<ARG1>', help='Arg1 desc')
def do_command_bar(sc, args):
    """This is the command doc."""
    return
@ -1,74 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
import testtools
|
||||
|
||||
from senlinclient import cliargs
|
||||
|
||||
|
||||
class TestCliArgs(testtools.TestCase):
|
||||
|
||||
def test_add_global_identity_args(self):
|
||||
parser = mock.Mock()
|
||||
|
||||
cliargs.add_global_identity_args(parser)
|
||||
expected = [
|
||||
'--os-auth-plugin',
|
||||
'--os-auth-url',
|
||||
'--os-project-id',
|
||||
'--os-project-name',
|
||||
'--os-tenant-id',
|
||||
'--os-tenant-name',
|
||||
'--os-domain-id',
|
||||
'--os-domain-name',
|
||||
'--os-project-domain-id',
|
||||
'--os-project-domain-name',
|
||||
'--os-user-domain-id',
|
||||
'--os-user-domain-name',
|
||||
'--os-username',
|
||||
'--os-user-id',
|
||||
'--os-password',
|
||||
'--os-trust-id',
|
||||
'--os-token',
|
||||
'--os-access-info',
|
||||
]
|
||||
|
||||
options = [arg[0][0] for arg in parser.add_argument.call_args_list]
|
||||
self.assertEqual(expected, options)
|
||||
|
||||
parser.add_mutually_exclusive_group.assert_called_once_with()
|
||||
group = parser.add_mutually_exclusive_group.return_value
|
||||
|
||||
verify_opts = [arg[0][0] for arg in group.add_argument.call_args_list]
|
||||
verify_args = [
|
||||
'--os-cacert',
|
||||
'--verify',
|
||||
'--insecure'
|
||||
]
|
||||
self.assertEqual(verify_args, verify_opts)
|
||||
|
||||
def test_add_global_args(self):
|
||||
parser = mock.Mock()
|
||||
|
||||
cliargs.add_global_args(parser, '1')
|
||||
expected = [
|
||||
'-h',
|
||||
'--version',
|
||||
'-d',
|
||||
'-v',
|
||||
'--api-timeout',
|
||||
'--senlin-api-version'
|
||||
]
|
||||
|
||||
options = [arg[0][0] for arg in parser.add_argument.call_args_list]
|
||||
self.assertEqual(expected, options)
|
@ -1,39 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
import testtools

from senlinclient import client as sc
from senlinclient.common import utils


class FakeClient(object):

    def __init__(self, session):
        super(FakeClient, self).__init__()
        self.session = session


class ClientTest(testtools.TestCase):

    @mock.patch.object(utils, 'import_versioned_module')
    def test_client_init(self, mock_import):
        the_module = mock.Mock()
        the_module.Client = FakeClient
        mock_import.return_value = the_module
        session = mock.Mock()

        res = sc.Client('FAKE_VER', session)

        mock_import.assert_called_once_with('FAKE_VER', 'client')
        self.assertIsInstance(res, FakeClient)
@ -1,89 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json
import yaml

from osc_lib.tests import utils
from senlinclient.common import format_utils


columns = ['col1', 'col2', 'col3']
data = ['abcde', ['fg', 'hi', 'jk'], {'lmnop': 'qrstu'}]


class ShowJson(format_utils.JsonFormat):
    def take_action(self, parsed_args):
        return columns, data


class ShowYaml(format_utils.YamlFormat):
    def take_action(self, parsed_args):
        return columns, data


class ShowShell(format_utils.ShellFormat):
    def take_action(self, parsed_args):
        return columns, data


class ShowValue(format_utils.ValueFormat):
    def take_action(self, parsed_args):
        return columns, data


class TestFormats(utils.TestCommand):

    def test_json_format(self):
        self.cmd = ShowJson(self.app, None)
        parsed_args = self.check_parser(self.cmd, [], [])
        expected = json.dumps(dict(zip(columns, data)), indent=2)

        self.cmd.run(parsed_args)

        self.assertEqual(expected, self.app.stdout.make_string())

    def test_yaml_format(self):
        self.cmd = ShowYaml(self.app, None)
        parsed_args = self.check_parser(self.cmd, [], [])
        expected = yaml.safe_dump(dict(zip(columns, data)),
                                  default_flow_style=False)

        self.cmd.run(parsed_args)

        self.assertEqual(expected, self.app.stdout.make_string())

    def test_shell_format(self):
        self.cmd = ShowShell(self.app, None)
        parsed_args = self.check_parser(self.cmd, [], [])
        expected = '''\
col1="abcde"
col2="['fg', 'hi', 'jk']"
col3="{'lmnop': 'qrstu'}"
'''

        self.cmd.run(parsed_args)

        self.assertEqual(expected, self.app.stdout.make_string())

    def test_value_format(self):
        self.cmd = ShowValue(self.app, None)
        parsed_args = self.check_parser(self.cmd, [], [])
        expected = '''\
abcde
['fg', 'hi', 'jk']
{'lmnop': 'qrstu'}
'''

        self.cmd.run(parsed_args)

        self.assertEqual(expected, self.app.stdout.make_string())
@ -1,65 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
from openstack import connection as sdk_connection
from openstack import profile as sdk_profile
import testtools

from senlinclient.common import sdk


class TestSdk(testtools.TestCase):

    @mock.patch.object(sdk_connection, 'Connection')
    def test_create_connection_with_profile(self, mock_connection):
        mock_prof = mock.Mock()
        mock_conn = mock.Mock()
        mock_connection.return_value = mock_conn
        kwargs = {
            'user_id': '123',
            'password': 'abc',
            'auth_url': 'test_url'
        }
        res = sdk.create_connection(mock_prof, **kwargs)
        mock_connection.assert_called_once_with(profile=mock_prof,
                                                user_agent=None,
                                                user_id='123',
                                                password='abc',
                                                auth_url='test_url')
        self.assertEqual(mock_conn, res)

    @mock.patch.object(sdk_connection, 'Connection')
    @mock.patch.object(sdk_profile, 'Profile')
    def test_create_connection_without_profile(self, mock_profile,
                                               mock_connection):
        mock_prof = mock.Mock()
        mock_conn = mock.Mock()
        mock_profile.return_value = mock_prof
        mock_connection.return_value = mock_conn
        kwargs = {
            'interface': 'public',
            'region_name': 'RegionOne',
            'user_id': '123',
            'password': 'abc',
            'auth_url': 'test_url'
        }
        res = sdk.create_connection(**kwargs)

        mock_prof.set_interface.assert_called_once_with('clustering', 'public')
        mock_prof.set_region.assert_called_once_with('clustering', 'RegionOne')
        mock_connection.assert_called_once_with(profile=mock_prof,
                                                user_agent=None,
                                                user_id='123',
                                                password='abc',
                                                auth_url='test_url')
        self.assertEqual(mock_conn, res)
@ -1,354 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import argparse
import logging
import sys

import mock
import six
from six.moves import builtins
import testtools

from senlinclient import client as senlin_client
from senlinclient.common import exc
from senlinclient.common.i18n import _
from senlinclient.common import sdk
from senlinclient.common import utils
from senlinclient import shell
from senlinclient.tests.unit import fakes


class HelpFormatterTest(testtools.TestCase):

    def test_start_section(self):
        fmtr = shell.HelpFormatter('senlin')
        res = fmtr.start_section(('heading', 'text1', 30))
        self.assertIsNone(res)
        h = fmtr._current_section.heading
        self.assertEqual("HEADING('text1', 30)", h)


class TestArgs(testtools.TestCase):

    def __init__(self):
        self.auth_url = 'http://fakeurl/v3'
        self.auth_plugin = 'test_plugin'
        self.username = 'test_user_name'
        self.user_id = 'test_user_id'
        self.token = 'test_token'
        self.project_id = 'test_project_id'
        self.project_name = 'test_project_name'
        self.tenant_id = 'test_tenant_id'
        self.tenant_name = 'test_tenant_name'
        self.password = 'test_password'
        self.user_domain_id = 'test_user_domain_id'
        self.user_domain_name = 'test_user_domain_name'
        self.project_domain_id = 'test_project_domain_id'
        self.project_domain_name = 'test_project_domain_name'
        self.domain_name = 'test_domain_name'
        self.domain_id = 'test_domain_id'
        self.verify = 'test_verify'
        self.user_preferences = 'test_preferences'
        self.trust_id = 'test_trust'


class ShellTest(testtools.TestCase):

    def setUp(self):
        super(ShellTest, self).setUp()

    def SHELL(self, func, *args, **kwargs):
        orig_out = sys.stdout
        sys.stdout = six.StringIO()
        func(*args, **kwargs)
        output = sys.stdout.getvalue()
        sys.stdout.close()
        sys.stdout = orig_out

        return output

    @mock.patch.object(logging, 'basicConfig')
    @mock.patch.object(logging, 'getLogger')
    def test_setup_logging_debug(self, x_get, x_config):
        sh = shell.SenlinShell()
        sh._setup_logging(True)

        x_config.assert_called_once_with(
            format="%(levelname)s (%(module)s) %(message)s",
            level=logging.DEBUG)
        mock_calls = [
            mock.call('iso8601'),
            mock.call().setLevel(logging.WARNING),
            mock.call('urllib3.connectionpool'),
            mock.call().setLevel(logging.WARNING),
        ]
        x_get.assert_has_calls(mock_calls)

    @mock.patch.object(logging, 'basicConfig')
    @mock.patch.object(logging, 'getLogger')
    def test_setup_logging_no_debug(self, x_get, x_config):
        sh = shell.SenlinShell()
        sh._setup_logging(False)

        x_config.assert_called_once_with(
            format="%(levelname)s (%(module)s) %(message)s",
            level=logging.WARNING)
        mock_calls = [
            mock.call('iso8601'),
            mock.call().setLevel(logging.WARNING),
            mock.call('urllib3.connectionpool'),
            mock.call().setLevel(logging.WARNING),
        ]
        x_get.assert_has_calls(mock_calls)

    def test_setup_verbose(self):
        sh = shell.SenlinShell()
        sh._setup_verbose(True)
        self.assertEqual(1, exc.verbose)

        sh._setup_verbose(False)
        self.assertEqual(1, exc.verbose)

    def test_find_actions(self):
        sh = shell.SenlinShell()
        sh.subcommands = {}
        subparsers = mock.Mock()
        x_subparser1 = mock.Mock()
        x_subparser2 = mock.Mock()
        x_add_parser = mock.Mock(side_effect=[x_subparser1, x_subparser2])
        subparsers.add_parser = x_add_parser

        # subparsers.add_parser = mock.Mock(return_value=x_subparser)
        sh._find_actions(subparsers, fakes)

        self.assertEqual({'command-bar': x_subparser1,
                          'command-foo': x_subparser2},
                         sh.subcommands)
        add_calls = [
            mock.call('command-bar', help='This is the command doc.',
                      description='This is the command doc.',
                      add_help=False,
                      formatter_class=shell.HelpFormatter),
            mock.call('command-foo', help='Pydoc for command foo.',
                      description='Pydoc for command foo.',
                      add_help=False,
                      formatter_class=shell.HelpFormatter),
        ]
        x_add_parser.assert_has_calls(add_calls)

        calls_1 = [
            mock.call('-h', '--help', action='help',
                      help=argparse.SUPPRESS),
            mock.call('-F', '--flag', metavar='<FLAG>',
                      help='Flag desc.'),
            mock.call('arg1', metavar='<ARG1>',
                      help='Arg1 desc')
        ]
        x_subparser1.add_argument.assert_has_calls(calls_1)
        x_subparser1.set_defaults.assert_called_once_with(
            func=fakes.do_command_bar)

        calls_2 = [
            mock.call('-h', '--help', action='help',
                      help=argparse.SUPPRESS),
        ]
        x_subparser2.add_argument.assert_has_calls(calls_2)
        x_subparser2.set_defaults.assert_called_once_with(
            func=fakes.do_command_foo)

    def test_do_bash_completion(self):
        sh = shell.SenlinShell()
        sc1 = mock.Mock()
        sc2 = mock.Mock()
        sc1._optionals._option_string_actions = ('A1', 'A2', 'C')
        sc2._optionals._option_string_actions = ('B1', 'B2', 'C')
        sh.subcommands = {
            'command-foo': sc1,
            'command-bar': sc2,
            'bash-completion': None,
        }

        output = self.SHELL(sh.do_bash_completion, None)

        output = output.split('\n')[0]
        output_list = output.split(' ')
        for option in ('A1', 'A2', 'C', 'B1', 'B2',
                       'command-foo', 'command-bar'):
            self.assertIn(option, output_list)

    def test_do_add_profiler_args(self):
        sh = shell.SenlinShell()
        parser = mock.Mock()

        sh.add_profiler_args(parser)

        self.assertEqual(0, parser.add_argument.call_count)

    @mock.patch.object(utils, 'import_versioned_module')
    @mock.patch.object(shell.SenlinShell, '_find_actions')
    def test_get_subcommand_parser(self, x_find, x_import):
        x_base = mock.Mock()
        x_module = mock.Mock()
        x_import.return_value = x_module
        sh = shell.SenlinShell()

        res = sh.get_subcommand_parser(x_base, 'v100')

        self.assertEqual(x_base, res)
        x_base.add_subparsers.assert_called_once_with(
            metavar='<subcommand>')
        x_subparsers = x_base.add_subparsers.return_value
        x_import.assert_called_once_with('v100', 'shell')
        find_calls = [
            mock.call(x_subparsers, x_module),
            mock.call(x_subparsers, sh)
        ]

        x_find.assert_has_calls(find_calls)

    @mock.patch.object(argparse.ArgumentParser, 'print_help')
    def test_do_help(self, mock_print):
        sh = shell.SenlinShell()
        args = mock.Mock()
        args.command = mock.Mock()
        sh.subcommands = {args.command: argparse.ArgumentParser}
        sh.do_help(args)
        self.assertTrue(mock_print.called)

        sh.subcommands = {}
        ex = self.assertRaises(exc.CommandError,
                               sh.do_help, args)
        msg = _("'%s' is not a valid subcommand") % args.command
        self.assertEqual(msg, six.text_type(ex))

    @mock.patch.object(builtins, 'print')
    def test_check_identity_arguments(self, mock_print):
        sh = shell.SenlinShell()
        # auth_url is not specified.
        args = TestArgs()
        args.auth_url = None
        ex = self.assertRaises(exc.CommandError,
                               sh._check_identity_arguments, args)
        msg = _('You must provide an auth url via --os-auth-url (or '
                ' env[OS_AUTH_URL])')
        self.assertEqual(msg, six.text_type(ex))
        # username, user_id and token are not specified.
        args = TestArgs()
        args.username = None
        args.user_id = None
        args.token = None
        msg = _('You must provide a user name, a user_id or a '
                'token for authentication')
        ex = self.assertRaises(exc.CommandError,
                               sh._check_identity_arguments, args)
        self.assertEqual(msg, six.text_type(ex))
        # Both username and user_id are specified.
        args = TestArgs()
        args.project_id = None
        args.tenant_id = None
        sh._check_identity_arguments(args)
        msg = _('WARNING: Both user name and user ID are specified, '
                'Senlin will use user ID for authentication')
        mock_print.assert_called_with(msg)

        # 'v3' in auth_url but neither user_domain_id nor user_domain_name
        # is specified.
        args = TestArgs()
        args.user_id = None
        args.user_domain_id = None
        args.user_domain_name = None
        msg = _('Either user domain ID (--user-domain-id / '
                'env[OS_USER_DOMAIN_ID]) or user domain name '
                '(--user-domain-name / env[OS_USER_DOMAIN_NAME]) '
                'must be specified, because user name may not be '
                'unique.')
        ex = self.assertRaises(exc.CommandError,
                               sh._check_identity_arguments, args)
        self.assertEqual(msg, six.text_type(ex))
        # user_id, project_id, project_name, tenant_id and tenant_name are all
        # not specified.
        args = TestArgs()
        args.project_id = None
        args.project_name = None
        args.tenant_id = None
        args.tenant_name = None
        args.user_id = None
        msg = _('Either project/tenant ID or project/tenant name '
                'must be specified, or else Senlin cannot know '
                'which project to use.')
        ex = self.assertRaises(exc.CommandError,
                               sh._check_identity_arguments, args)
        self.assertEqual(msg, six.text_type(ex))
        args.user_id = 'test_user_id'
        sh._check_identity_arguments(args)
        msg = _('Neither project ID nor project name is specified. '
                'Senlin will use user\'s default project which may '
                'result in authentication error.')
        mock_print.assert_called_with(_('WARNING: %s') % msg)

        # Both project_name and project_id are specified
        args = TestArgs()
        args.user_id = None
        sh._check_identity_arguments(args)
        msg = _('Both project/tenant name and project/tenant ID are '
                'specified, Senlin will use project ID for '
                'authentication')
        mock_print.assert_called_with(_('WARNING: %s') % msg)
        # Project name may not be unique
        args = TestArgs()
        args.user_id = None
        args.project_id = None
        args.tenant_id = None
        args.project_domain_id = None
        args.project_domain_name = None
        msg = _('Either project domain ID (--project-domain-id / '
                'env[OS_PROJECT_DOMAIN_ID]) orr project domain name '
                '(--project-domain-name / '
                'env[OS_PROJECT_DOMAIN_NAME]) must be specified, '
                'because project/tenant name may not be unique.')
        ex = self.assertRaises(exc.CommandError,
                               sh._check_identity_arguments, args)
        self.assertEqual(msg, six.text_type(ex))

    @mock.patch.object(sdk, 'create_connection')
    def test_setup_senlinclient(self, mock_conn):
        USER_AGENT = 'python-senlinclient'
        args = TestArgs()
        kwargs = {
            'auth_plugin': args.auth_plugin,
            'auth_url': args.auth_url,
            'project_name': args.project_name or args.tenant_name,
            'project_id': args.project_id or args.tenant_id,
            'domain_name': args.domain_name,
            'domain_id': args.domain_id,
            'project_domain_name': args.project_domain_name,
            'project_domain_id': args.project_domain_id,
            'user_domain_name': args.user_domain_name,
            'user_domain_id': args.user_domain_id,
            'username': args.username,
            'user_id': args.user_id,
            'password': args.password,
            'verify': args.verify,
            'token': args.token,
            'trust_id': args.trust_id,
        }
        sh = shell.SenlinShell()
        conn = mock.Mock()
        mock_conn.return_value = conn
        conn.session = mock.Mock()
        sh._setup_senlin_client('1', args)
        mock_conn.assert_called_once_with(prof=None, user_agent=USER_AGENT,
                                          **kwargs)
        client = mock.Mock()
        senlin_client.Client = mock.Mock(return_value=client)
        self.assertEqual(client, sh._setup_senlin_client('1', args))
@ -1,250 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import collections
|
||||
import sys
|
||||
|
||||
from heatclient.common import template_utils
|
||||
import mock
|
||||
import six
|
||||
import testtools
|
||||
|
||||
from senlinclient.common import exc
|
||||
from senlinclient.common.i18n import _
|
||||
from senlinclient.common import utils
|
||||
|
||||
|
||||
class CaptureStdout(object):
|
||||
"""Context manager for capturing stdout from statements in its block."""
|
||||
def __enter__(self):
|
||||
self.real_stdout = sys.stdout
|
||||
self.stringio = six.StringIO()
|
||||
sys.stdout = self.stringio
|
||||
return self
|
||||
|
||||
def __exit__(self, *args):
|
||||
sys.stdout = self.real_stdout
|
||||
self.stringio.seek(0)
|
||||
self.read = self.stringio.read
|
||||
|
||||
|
||||
class shellTest(testtools.TestCase):
|
||||
|
||||
def test_format_parameter(self):
|
||||
params = ['status=ACTIVE;name=cluster1']
|
||||
format_params = {'status': 'ACTIVE', 'name': 'cluster1'}
|
||||
self.assertEqual(format_params,
|
||||
utils.format_parameters(params))
|
||||
|
||||
def test_format_parameter_split(self):
|
||||
params = ['status=ACTIVE', 'name=cluster1']
|
||||
format_params = {'status': 'ACTIVE', 'name': 'cluster1'}
|
||||
self.assertEqual(format_params,
|
||||
utils.format_parameters(params))
|
||||
|
||||
def test_format_parameter_none_dict(self):
|
||||
params = ['{}']
|
||||
self.assertEqual({}, utils.format_parameters(params))
|
||||
|
||||
def test_format_parameter_none(self):
|
||||
self.assertEqual({}, utils.format_parameters(None))
|
||||
|
||||
def test_format_parameter_bad_format(self):
|
||||
params = ['status:ACTIVE;name:cluster1']
|
||||
ex = self.assertRaises(exc.CommandError,
|
||||
utils.format_parameters,
|
||||
params)
|
||||
msg = _('Malformed parameter(status:ACTIVE). '
|
||||
'Use the key=value format.')
|
||||
self.assertEqual(msg, six.text_type(ex))
|
||||
|
||||
@mock.patch.object(template_utils,
|
||||
'process_multiple_environments_and_files')
|
||||
@mock.patch.object(template_utils, 'get_template_contents')
|
||||
def test_process_stack_spec(self, mock_get_temp, mock_process):
|
||||
spec = {
|
||||
'template': 'temp.yaml',
|
||||
'disable_rollback': True,
|
||||
'context': {
|
||||
'region_name': 'RegionOne'
|
||||
},
|
||||
}
|
||||
tpl_files = {'fake_key1': 'fake_value1'}
|
||||
template = mock.Mock()
|
||||
mock_get_temp.return_value = tpl_files, template
|
||||
env_files = {'fake_key2': 'fake_value2'}
|
||||
env = mock.Mock()
|
||||
mock_process.return_value = env_files, env
|
||||
new_spec = utils.process_stack_spec(spec)
|
||||
stack_spec = {
|
||||
'disable_rollback': True,
|
||||
'context': {
|
||||
'region_name': 'RegionOne',
|
||||
},
|
||||
'parameters': {},
|
||||
'timeout': 60,
|
||||
'template': template,
|
||||
'files': {
|
||||
'fake_key1': 'fake_value1',
|
||||
'fake_key2': 'fake_value2',
|
||||
},
|
||||
'environment': env
|
||||
}
|
||||
self.assertEqual(stack_spec, new_spec)
|
||||
mock_get_temp.assert_called_once_with(template_file='temp.yaml')
|
||||
mock_process.assert_called_once_with(env_paths=None)
|
||||
|
||||
def test_json_formatter_with_empty_json(self):
|
||||
params = {}
|
||||
self.assertEqual('{}', utils.json_formatter(params))
|
||||
|
||||
def test_list_formatter_with_list(self):
|
||||
params = ['foo', 'bar']
|
||||
self.assertEqual('foo\nbar', utils.list_formatter(params))
|
||||
|
||||
def test_list_formatter_with_empty_list(self):
|
||||
params = []
|
||||
self.assertEqual('', utils.list_formatter(params))
|
||||
|
||||
|
||||
class PrintListTestCase(testtools.TestCase):
|
||||
|
||||
def test_print_list_with_list(self):
|
||||
Row = collections.namedtuple('Row', ['foo', 'bar'])
|
||||
to_print = [Row(foo='fake_foo1', bar='fake_bar2'),
|
||||
Row(foo='fake_foo2', bar='fake_bar1')]
|
||||
with CaptureStdout() as cso:
|
||||
utils.print_list(to_print, ['foo', 'bar'])
|
||||
# Output should be sorted by the first key (foo)
|
||||
self.assertEqual("""\
|
||||
+-----------+-----------+
|
||||
| foo | bar |
|
||||
+-----------+-----------+
|
||||
| fake_foo1 | fake_bar2 |
|
||||
| fake_foo2 | fake_bar1 |
|
||||
+-----------+-----------+
|
||||
""", cso.read())
|
||||
|
||||
def test_print_list_with_None_string(self):
|
||||
Row = collections.namedtuple('Row', ['foo', 'bar'])
|
||||
to_print = [Row(foo='fake_foo1', bar='None'),
|
||||
Row(foo='fake_foo2', bar='fake_bar1')]
|
||||
with CaptureStdout() as cso:
|
||||
utils.print_list(to_print, ['foo', 'bar'])
|
||||
# Output should be sorted by the first key (foo)
|
||||
self.assertEqual("""\
|
||||
+-----------+-----------+
|
||||
| foo | bar |
|
||||
+-----------+-----------+
|
||||
| fake_foo1 | None |
|
||||
| fake_foo2 | fake_bar1 |
|
||||
+-----------+-----------+
|
||||
""", cso.read())
|
||||
|
||||
def test_print_list_with_None_data(self):
|
||||
Row = collections.namedtuple('Row', ['foo', 'bar'])
|
||||
to_print = [Row(foo='fake_foo1', bar=None),
|
||||
Row(foo='fake_foo2', bar='fake_bar1')]
|
||||
with CaptureStdout() as cso:
|
||||
utils.print_list(to_print, ['foo', 'bar'])
|
||||
# Output should be sorted by the first key (foo)
|
||||
self.assertEqual("""\
|
||||
+-----------+-----------+
|
||||
| foo | bar |
|
||||
+-----------+-----------+
|
||||
| fake_foo1 | - |
|
||||
| fake_foo2 | fake_bar1 |
|
||||
+-----------+-----------+
|
||||
""", cso.read())
|
||||
|
||||
def test_print_list_with_list_sortby(self):
|
||||
Row = collections.namedtuple('Row', ['foo', 'bar'])
|
||||
to_print = [Row(foo='fake_foo1', bar='fake_bar2'),
|
||||
Row(foo='fake_foo2', bar='fake_bar1')]
|
||||
with CaptureStdout() as cso:
|
||||
utils.print_list(to_print, ['foo', 'bar'], sortby_index=1)
|
||||
# Output should be sorted by the first key (bar)
|
||||
self.assertEqual("""\
|
||||
+-----------+-----------+
|
||||
| foo | bar |
|
||||
+-----------+-----------+
|
||||
| fake_foo2 | fake_bar1 |
|
||||
| fake_foo1 | fake_bar2 |
|
||||
+-----------+-----------+
|
||||
""", cso.read())
|
||||
|
||||
def test_print_list_with_list_no_sort(self):
|
||||
Row = collections.namedtuple('Row', ['foo', 'bar'])
|
||||
to_print = [Row(foo='fake_foo2', bar='fake_bar1'),
|
||||
Row(foo='fake_foo1', bar='fake_bar2')]
|
||||
with CaptureStdout() as cso:
|
||||
utils.print_list(to_print, ['foo', 'bar'], sortby_index=None)
|
||||
# Output should be in the order given
|
||||
self.assertEqual("""\
|
||||
+-----------+-----------+
|
||||
| foo | bar |
|
||||
+-----------+-----------+
|
||||
| fake_foo2 | fake_bar1 |
|
||||
| fake_foo1 | fake_bar2 |
|
||||
+-----------+-----------+
|
||||
""", cso.read())
|
||||
|
||||
def test_print_list_with_generator(self):
|
||||
Row = collections.namedtuple('Row', ['foo', 'bar'])
|
||||
|
||||
def gen_rows():
|
||||
for row in [Row(foo='fake_foo1', bar='fake_bar2'),
|
||||
Row(foo='fake_foo2', bar='fake_bar1')]:
|
||||
yield row
|
||||
with CaptureStdout() as cso:
|
||||
utils.print_list(gen_rows(), ['foo', 'bar'])
|
||||
self.assertEqual("""\
|
||||
+-----------+-----------+
|
||||
| foo | bar |
|
||||
+-----------+-----------+
|
||||
| fake_foo1 | fake_bar2 |
|
||||
| fake_foo2 | fake_bar1 |
|
||||
+-----------+-----------+
|
||||
""", cso.read())
|
||||
|
||||
|
||||
class PrintDictTestCase(testtools.TestCase):
|
||||
|
||||
def test_print_dict(self):
|
||||
data = {'foo': 'fake_foo', 'bar': 'fake_bar'}
|
||||
with CaptureStdout() as cso:
|
||||
utils.print_dict(data)
|
||||
# Output should be sorted by the Property
|
||||
self.assertEqual("""\
|
||||
+----------+----------+
|
||||
| Property | Value |
|
||||
+----------+----------+
|
||||
| bar | fake_bar |
|
||||
| foo | fake_foo |
|
||||
+----------+----------+
|
||||
""", cso.read())
|
||||
|
||||
def test_print_dict_with_None_data(self):
|
||||
Row = collections.namedtuple('Row', ['foo', 'bar'])
|
||||
to_print = [Row(foo='fake_foo1', bar=None),
|
||||
Row(foo='fake_foo2', bar='fake_bar1')]
|
||||
with CaptureStdout() as cso:
|
||||
utils.print_list(to_print, ['foo', 'bar'])
|
||||
# Output should be sorted by the first key (foo)
|
||||
self.assertEqual("""\
|
||||
+-----------+-----------+
|
||||
| foo | bar |
|
||||
+-----------+-----------+
|
||||
| fake_foo1 | - |
|
||||
| fake_foo2 | fake_bar1 |
|
||||
+-----------+-----------+
|
||||
""", cso.read())
|
@ -1,179 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import sys

import json
import mock
from osc_lib.tests import utils
import requests
import six


AUTH_TOKEN = "foobar"
AUTH_URL = "http://0.0.0.0"
USERNAME = "itchy"
PASSWORD = "scratchy"

TEST_RESPONSE_DICT_V3 = {
    "token": {
        "audit_ids": [
            "a"
        ],
        "catalog": [
        ],
        "expires_at": "2034-09-29T18:27:15.978064Z",
        "extras": {},
        "issued_at": "2014-09-29T17:27:15.978097Z",
        "methods": [
            "password"
        ],
        "project": {
            "domain": {
                "id": "default",
                "name": "Default"
            },
            "id": "bbb",
            "name": "project"
        },
        "roles": [
        ],
        "user": {
            "domain": {
                "id": "default",
                "name": "Default"
            },
            "id": "aaa",
            "name": USERNAME
        }
    }
}
TEST_VERSIONS = {
    "versions": {
        "values": [
            {
                "id": "v3.0",
                "links": [
                    {
                        "href": AUTH_URL,
                        "rel": "self"
                    }
                ],
                "media-types": [
                    {
                        "base": "application/json",
                        "type": "application/vnd.openstack.identity-v3+json"
                    },
                    {
                        "base": "application/xml",
                        "type": "application/vnd.openstack.identity-v3+xml"
                    }
                ],
                "status": "stable",
                "updated": "2013-03-06T00:00:00Z"
            }
        ]
    }
}


class FakeStdout(object):
    def __init__(self):
        self.content = []

    def write(self, text):
        self.content.append(text)

    def make_string(self):
        result = ''
        for line in self.content:
            result = result + line
        return result


class FakeApp(object):
    def __init__(self, _stdout):
        self.stdout = _stdout
        self.client_manager = None
        self.stdin = sys.stdin
        self.stdout = _stdout or sys.stdout
        self.stderr = sys.stderr


class FakeClient(object):
    def __init__(self, **kwargs):
        self.auth_url = kwargs['auth_url']
        self.token = kwargs['token']


class FakeClientManager(object):
    def __init__(self):
        self.compute = None
        self.identity = None
        self.image = None
        self.object_store = None
        self.volume = None
        self.network = None
        self.session = None
        self.auth_ref = None


class FakeModule(object):
    def __init__(self, name, version):
        self.name = name
        self.__version__ = version


class FakeResource(object):
    def __init__(self, manager, info, loaded=False):
        self.manager = manager
        self._info = info
        self._add_details(info)
        self._loaded = loaded

    def _add_details(self, info):
        for (k, v) in info.items():
            setattr(self, k, v)

    def __repr__(self):
        reprkeys = sorted(k for k in self.__dict__.keys() if k[0] != '_' and
                          k != 'manager')
        info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys)
        return "<%s %s>" % (self.__class__.__name__, info)


class FakeResponse(requests.Response):
    def __init__(self, headers={}, status_code=200, data=None, encoding=None):
        super(FakeResponse, self).__init__()

        self.status_code = status_code

        self.headers.update(headers)
        self._content = json.dumps(data)
        if not isinstance(self._content, six.binary_type):
            self._content = self._content.encode()


class FakeClusteringv1Client(object):
    def __init__(self, **kwargs):
        self.http_client = mock.Mock()
        self.http_client.auth_token = kwargs['token']
        self.profiles = FakeResource(None, {})


class TestClusteringv1(utils.TestCommand):
    def setUp(self):
        super(TestClusteringv1, self).setUp()

        self.app.client_manager.clustering = FakeClusteringv1Client(
            token=AUTH_TOKEN, auth_url=AUTH_URL
        )
@ -1,176 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import copy

import mock
from openstack import exceptions as sdk_exc
from osc_lib import exceptions as exc

from senlinclient.tests.unit.v1 import fakes
from senlinclient.v1 import action as osc_action


class TestAction(fakes.TestClusteringv1):

    def setUp(self):
        super(TestAction, self).setUp()
        self.mock_client = self.app.client_manager.clustering


class TestActionList(TestAction):

    columns = ['id', 'name', 'action', 'status', 'target_id', 'depends_on',
               'depended_by', 'created_at']
    defaults = {
        'global_project': False,
        'marker': None,
        'limit': None,
        'sort': None,
    }

    def setUp(self):
        super(TestActionList, self).setUp()
        self.cmd = osc_action.ListAction(self.app, None)
        fake_action = mock.Mock(
            action="NODE_CREATE",
            cause="RPC Request",
            created_at="2015-12-04T04:54:41",
            depended_by=[],
            depends_on=[],
            end_time=1425550000.0,
            id="2366d440-c73e-4961-9254-6d1c3af7c167",
            inputs={},
            interval=-1,
            name="node_create_0df0931b",
            outputs={},
            owner=None,
            start_time=1425550000.0,
            status="SUCCEEDED",
            status_reason="Action completed successfully.",
            target_id="0df0931b-e251-4f2e-8719-4ebfda3627ba",
            timeout=3600,
            updated_at=None
        )
        fake_action.to_dict = mock.Mock(return_value={})
        self.mock_client.actions = mock.Mock(return_value=[fake_action])

    def test_action_list_defaults(self):
        arglist = []
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.actions.assert_called_with(**self.defaults)
        self.assertEqual(self.columns, columns)

    def test_action_list_full_id(self):
        arglist = ['--full-id']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.actions.assert_called_with(**self.defaults)
        self.assertEqual(self.columns, columns)

    def test_action_list_limit(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['limit'] = '3'
        arglist = ['--limit', '3']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.actions.assert_called_with(**kwargs)
        self.assertEqual(self.columns, columns)

    def test_action_list_sort(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['sort'] = 'name:asc'
        arglist = ['--sort', 'name:asc']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.actions.assert_called_with(**kwargs)
        self.assertEqual(self.columns, columns)

    def test_action_list_sort_invalid_key(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['sort'] = 'bad_key'
        arglist = ['--sort', 'bad_key']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.mock_client.actions.side_effect = sdk_exc.HttpException()
        self.assertRaises(sdk_exc.HttpException,
                          self.cmd.take_action, parsed_args)

    def test_action_list_sort_invalid_direction(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['sort'] = 'name:bad_direction'
        arglist = ['--sort', 'name:bad_direction']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.mock_client.actions.side_effect = sdk_exc.HttpException()
        self.assertRaises(sdk_exc.HttpException,
                          self.cmd.take_action, parsed_args)

    def test_action_list_filter(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['name'] = 'my_action'
        arglist = ['--filter', 'name=my_action']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.actions.assert_called_with(**kwargs)
        self.assertEqual(self.columns, columns)

    def test_action_list_marker(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['marker'] = 'a9448bf6'
        arglist = ['--marker', 'a9448bf6']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.actions.assert_called_with(**kwargs)
        self.assertEqual(self.columns, columns)


class TestActionShow(TestAction):

    def setUp(self):
        super(TestActionShow, self).setUp()
        self.cmd = osc_action.ShowAction(self.app, None)
        fake_action = mock.Mock(
            action="NODE_CREATE",
            cause="RPC Request",
            created_at="2015-12-04T04:54:41",
            depended_by=[],
            depends_on=[],
            end_time=1425550000.0,
            id="2366d440-c73e-4961-9254-6d1c3af7c167",
            inputs={},
            interval=-1,
            name="node_create_0df0931b",
            outputs={},
            owner=None,
            start_time=1425550000.0,
            status="SUCCEEDED",
            status_reason="Action completed successfully.",
            target_id="0df0931b-e251-4f2e-8719-4ebfda3627ba",
            timeout=3600,
            updated_at=None
        )
        fake_action.to_dict = mock.Mock(return_value={})
        self.mock_client.get_action = mock.Mock(return_value=fake_action)

    def test_action_show(self):
        arglist = ['my_action']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.cmd.take_action(parsed_args)
        self.mock_client.get_action.assert_called_with('my_action')

    def test_action_show_not_found(self):
        arglist = ['my_action']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.mock_client.get_action.side_effect = sdk_exc.ResourceNotFound()
        error = self.assertRaises(exc.CommandError, self.cmd.take_action,
                                  parsed_args)
        self.assertEqual('Action not found: my_action', str(error))
@ -1,35 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock

from senlinclient.tests.unit.v1 import fakes
from senlinclient.v1 import build_info as osc_build_info


class TestBuildInfo(fakes.TestClusteringv1):

    def setUp(self):
        super(TestBuildInfo, self).setUp()
        self.cmd = osc_build_info.BuildInfo(self.app, None)
        self.mock_client = self.app.client_manager.clustering
        fake_bi = mock.Mock(
            api={"revision": "1.0"},
            engine={"revision": "1.0"}
        )
        self.mock_client.get_build_info = mock.Mock(return_value=fake_bi)

    def test_build_info(self):
        arglist = []
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.cmd.take_action(parsed_args)
        self.mock_client.get_build_info.assert_called_with()
@ -1,476 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
import testtools
|
||||
|
||||
from senlinclient.common import sdk
|
||||
from senlinclient.v1 import client
|
||||
|
||||
|
||||
@mock.patch.object(sdk, 'create_connection')
|
||||
class ClientTest(testtools.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(ClientTest, self).setUp()
|
||||
self.conn = mock.Mock()
|
||||
self.service = mock.Mock()
|
||||
self.conn.cluster = self.service
|
||||
|
||||
def test_init_default(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
|
||||
sc = client.Client()
|
||||
|
||||
self.assertEqual(self.conn, sc.conn)
|
||||
self.assertEqual(self.service, sc.service)
|
||||
mock_conn.assert_called_once_with(prof=None, user_agent=None)
|
||||
|
||||
def test_init_with_params(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
|
||||
sc = client.Client(prof='FOO', user_agent='BAR', zoo='LARR')
|
||||
|
||||
self.assertEqual(self.conn, sc.conn)
|
||||
self.assertEqual(self.service, sc.service)
|
||||
mock_conn.assert_called_once_with(prof='FOO', user_agent='BAR',
|
||||
zoo='LARR')
|
||||
|
||||
def test_profile_types(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.profile_types(foo='bar')
|
||||
self.assertEqual(self.service.profile_types.return_value, res)
|
||||
self.service.profile_types.assert_called_once_with(foo='bar')
|
||||
|
||||
def test_get_profile_type(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.get_profile_type('FOOBAR')
|
||||
self.assertEqual(self.service.get_profile_type.return_value, res)
|
||||
self.service.get_profile_type.assert_called_once_with('FOOBAR')
|
||||
|
||||
def test_profiles(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.profiles(foo='bar')
|
||||
self.assertEqual(self.service.profiles.return_value, res)
|
||||
self.service.profiles.assert_called_once_with(foo='bar')
|
||||
|
||||
def test_get_profile(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.get_profile('FOOBAR')
|
||||
self.assertEqual(self.service.get_profile.return_value, res)
|
||||
self.service.get_profile.assert_called_once_with('FOOBAR')
|
||||
|
||||
def test_update_profile(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.update_profile('FAKE_ID', foo='bar')
|
||||
self.assertEqual(self.service.update_profile.return_value, res)
|
||||
self.service.update_profile.assert_called_once_with('FAKE_ID',
|
||||
foo='bar')
|
||||
|
||||
def test_delete_profile(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.delete_profile('FAKE_ID')
|
||||
self.assertEqual(self.service.delete_profile.return_value, res)
|
||||
self.service.delete_profile.assert_called_once_with(
|
||||
'FAKE_ID', True)
|
||||
|
||||
def test_delete_profile_ignore_missing(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.delete_profile('FAKE_ID', False)
|
||||
self.assertEqual(self.service.delete_profile.return_value, res)
|
||||
self.service.delete_profile.assert_called_once_with(
|
||||
'FAKE_ID', False)
|
||||
|
||||
def test_policy_types(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.policy_types(foo='bar')
|
||||
self.assertEqual(self.service.policy_types.return_value, res)
|
||||
self.service.policy_types.assert_called_once_with(foo='bar')
|
||||
|
||||
def test_get_policy_type(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.get_policy_type('FOOBAR')
|
||||
self.assertEqual(self.service.get_policy_type.return_value, res)
|
||||
self.service.get_policy_type.assert_called_once_with('FOOBAR')
|
||||
|
||||
def test_policies(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.policies(foo='bar')
|
||||
self.assertEqual(self.service.policies.return_value, res)
|
||||
self.service.policies.assert_called_once_with(foo='bar')
|
||||
|
||||
def test_get_policy(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.get_policy('FOOBAR')
|
||||
self.assertEqual(self.service.get_policy.return_value, res)
|
||||
self.service.get_policy.assert_called_once_with('FOOBAR')
|
||||
|
||||
def test_update_policy(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.update_policy('FAKE_ID', foo='bar')
|
||||
self.assertEqual(self.service.update_policy.return_value, res)
|
||||
self.service.update_policy.assert_called_once_with(
|
||||
'FAKE_ID', foo='bar')
|
||||
|
||||
def test_delete_policy(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.delete_policy('FAKE_ID')
|
||||
self.assertEqual(self.service.delete_policy.return_value, res)
|
||||
self.service.delete_policy.assert_called_once_with(
|
||||
'FAKE_ID', True)
|
||||
|
||||
def test_delete_policy_ignore_missing(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.delete_policy('FAKE_ID', False)
|
||||
self.assertEqual(self.service.delete_policy.return_value, res)
|
||||
self.service.delete_policy.assert_called_once_with(
|
||||
'FAKE_ID', False)
|
||||
|
||||
def test_clusters(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.clusters(foo='bar')
|
||||
self.assertEqual(self.service.clusters.return_value, res)
|
||||
self.service.clusters.assert_called_once_with(foo='bar')
|
||||
|
||||
def test_get_cluster(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.get_cluster('FOOBAR')
|
||||
self.assertEqual(self.service.get_cluster.return_value, res)
|
||||
self.service.get_cluster.assert_called_once_with('FOOBAR')
|
||||
|
||||
def test_create_cluster(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.create_cluster(name='FOO', bar='zoo')
|
||||
self.assertEqual(self.service.create_cluster.return_value, res)
|
||||
self.service.create_cluster.assert_called_once_with(
|
||||
name='FOO', bar='zoo')
|
||||
|
||||
def test_update_cluster(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.update_cluster('FAKE_ID', foo='bar')
|
||||
self.assertEqual(self.service.update_cluster.return_value, res)
|
||||
self.service.update_cluster.assert_called_once_with(
|
||||
'FAKE_ID', foo='bar')
|
||||
|
||||
def test_delete_cluster(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.delete_cluster('FAKE_ID')
|
||||
self.assertEqual(self.service.delete_cluster.return_value, res)
|
||||
self.service.delete_cluster.assert_called_once_with(
|
||||
'FAKE_ID', True)
|
||||
|
||||
def test_delete_cluster_ignore_missing(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.delete_cluster('FAKE_ID', False)
|
||||
self.assertEqual(self.service.delete_cluster.return_value, res)
|
||||
self.service.delete_cluster.assert_called_once_with(
|
||||
'FAKE_ID', False)
|
||||
|
||||
def test_cluster_add_nodes(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.cluster_add_nodes('FAKE_ID', ['NODE1', 'NODE2'])
|
||||
self.assertEqual(self.service.cluster_add_nodes.return_value, res)
|
||||
self.service.cluster_add_nodes.assert_called_once_with(
|
||||
'FAKE_ID', ['NODE1', 'NODE2'])
|
||||
|
||||
def test_cluster_del_nodes(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.cluster_del_nodes('FAKE_ID', ['NODE1', 'NODE2'])
|
||||
self.assertEqual(self.service.cluster_del_nodes.return_value, res)
|
||||
self.service.cluster_del_nodes.assert_called_once_with(
|
||||
'FAKE_ID', ['NODE1', 'NODE2'])
|
||||
|
||||
def test_cluster_resize(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.cluster_resize('FAKE_ID', foo='bar', zoo=1)
|
||||
self.assertEqual(self.service.cluster_resize.return_value, res)
|
||||
self.service.cluster_resize.assert_called_once_with(
|
||||
'FAKE_ID', foo='bar', zoo=1)
|
||||
|
||||
def test_cluster_scale_in(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.cluster_scale_in('FAKE_ID', 3)
|
||||
self.assertEqual(self.service.cluster_scale_in.return_value, res)
|
||||
self.service.cluster_scale_in.assert_called_once_with(
|
||||
'FAKE_ID', 3)
|
||||
|
||||
def test_cluster_scale_out(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.cluster_scale_out('FAKE_ID', 3)
|
||||
self.assertEqual(self.service.cluster_scale_out.return_value, res)
|
||||
self.service.cluster_scale_out.assert_called_once_with(
|
||||
'FAKE_ID', 3)
|
||||
|
||||
def test_cluster_policies(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.cluster_policies('CLUSTER', foo='bar')
|
||||
self.assertEqual(self.service.cluster_policies.return_value, res)
|
||||
self.service.cluster_policies.assert_called_once_with(
|
||||
'CLUSTER', foo='bar')
|
||||
|
||||
def test_get_cluster_policy(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.get_cluster_policy('PID', 'CID')
|
||||
self.assertEqual(self.service.get_cluster_policy.return_value, res)
|
||||
self.service.get_cluster_policy.assert_called_once_with(
|
||||
'PID', 'CID')
|
||||
|
||||
def test_cluster_attach_policy(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.cluster_attach_policy('FOO', 'BAR', zoo='car')
|
||||
self.assertEqual(self.service.cluster_attach_policy.return_value, res)
|
||||
self.service.cluster_attach_policy.assert_called_once_with(
|
||||
'FOO', 'BAR', zoo='car')
|
||||
|
||||
def test_cluster_detach_policy(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.cluster_detach_policy('FOO', 'BAR')
|
||||
self.assertEqual(self.service.cluster_detach_policy.return_value, res)
|
||||
self.service.cluster_detach_policy.assert_called_once_with(
|
||||
'FOO', 'BAR')
|
||||
|
||||
def test_cluster_update_policy(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.cluster_update_policy('FOO', 'BAR', foo='bar')
|
||||
self.assertEqual(self.service.cluster_update_policy.return_value, res)
|
||||
self.service.cluster_update_policy.assert_called_once_with(
|
||||
'FOO', 'BAR', foo='bar')
|
||||
|
||||
def test_check_cluster(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.check_cluster('FAKE_CLUSTER_ID')
|
||||
self.assertEqual(self.service.check_cluster.return_value, res)
|
||||
self.service.check_cluster.assert_called_once_with('FAKE_CLUSTER_ID')
|
||||
|
||||
def test_recover_cluster(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.recover_cluster('FAKE_CLUSTER_ID')
|
||||
self.assertEqual(self.service.recover_cluster.return_value, res)
|
||||
self.service.recover_cluster.assert_called_once_with(
|
||||
'FAKE_CLUSTER_ID')
|
||||
|
||||
def test_nodes(self, mock_conn):
|
||||
mock_conn.return_value = self.conn
|
||||
sc = client.Client()
|
||||
|
||||
res = sc.nodes(foo='bar')
|
||||
self.assertEqual(self.service.nodes.return_value, res)
|
||||
self.service.nodes.assert_called_once_with(foo='bar')
|
||||
|
    def test_get_node(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.get_node('FOOBAR')
        self.assertEqual(self.service.get_node.return_value, res)
        self.service.get_node.assert_called_once_with('FOOBAR', details=False)

    def test_get_node_with_details(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.get_node('FOOBAR', details=True)
        self.assertEqual(self.service.get_node.return_value, res)
        self.service.get_node.assert_called_once_with(
            'FOOBAR', details=True)

    def test_create_node(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.create_node(name='FAKE_NAME', foo='bar')
        self.assertEqual(self.service.create_node.return_value, res)
        self.service.create_node.assert_called_once_with(
            name='FAKE_NAME', foo='bar')

    def test_update_node(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.update_node('FAKE_ID', foo='bar')
        self.assertEqual(self.service.update_node.return_value, res)
        self.service.update_node.assert_called_once_with(
            'FAKE_ID', foo='bar')

    def test_delete_node(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.delete_node('FAKE_ID')
        self.assertEqual(self.service.delete_node.return_value, res)
        self.service.delete_node.assert_called_once_with(
            'FAKE_ID', True)

    def test_check_node(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.check_node('FAKE_ID')
        self.assertEqual(self.service.check_node.return_value, res)
        self.service.check_node.assert_called_once_with('FAKE_ID')

    def test_recover_node(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.recover_node('FAKE_ID')
        self.assertEqual(self.service.recover_node.return_value, res)
        self.service.recover_node.assert_called_once_with(
            'FAKE_ID')

    def test_delete_node_ignore_missing(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.delete_node('FAKE_ID', False)
        self.assertEqual(self.service.delete_node.return_value, res)
        self.service.delete_node.assert_called_once_with(
            'FAKE_ID', False)

    def test_receivers(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.receivers(foo='bar')
        self.assertEqual(self.service.receivers.return_value, res)
        self.service.receivers.assert_called_once_with(foo='bar')

    def test_get_receiver(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.get_receiver('FOOBAR')
        self.assertEqual(self.service.get_receiver.return_value, res)
        self.service.get_receiver.assert_called_once_with('FOOBAR')

    def test_create_receiver(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.create_receiver(name='FAKE_NAME', foo='bar')
        self.assertEqual(self.service.create_receiver.return_value, res)
        self.service.create_receiver.assert_called_once_with(
            name='FAKE_NAME', foo='bar')

    def test_delete_receiver(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.delete_receiver('FAKE_ID')
        self.assertEqual(self.service.delete_receiver.return_value, res)
        self.service.delete_receiver.assert_called_once_with(
            'FAKE_ID', True)

    def test_delete_receiver_ignore_missing(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.delete_receiver('FAKE_ID', False)
        self.assertEqual(self.service.delete_receiver.return_value, res)
        self.service.delete_receiver.assert_called_once_with(
            'FAKE_ID', False)

    def test_actions(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.actions(foo='bar')
        self.assertEqual(self.service.actions.return_value, res)
        self.service.actions.assert_called_once_with(foo='bar')

    def test_get_action(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.get_action('FOOBAR')
        self.assertEqual(self.service.get_action.return_value, res)
        self.service.get_action.assert_called_once_with('FOOBAR')

    def test_events(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.events(foo='bar')
        self.assertEqual(self.service.events.return_value, res)
        self.service.events.assert_called_once_with(foo='bar')

    def test_get_event(self, mock_conn):
        mock_conn.return_value = self.conn
        sc = client.Client()

        res = sc.get_event('FOOBAR')
        self.assertEqual(self.service.get_event.return_value, res)
        self.service.get_event.assert_called_once_with('FOOBAR')
File diff suppressed because it is too large
@ -1,100 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock

from senlinclient.tests.unit.v1 import fakes
from senlinclient.v1 import cluster_policy as osc_cluster_policy


class TestClusterPolicy(fakes.TestClusteringv1):
    def setUp(self):
        super(TestClusterPolicy, self).setUp()
        self.mock_client = self.app.client_manager.clustering


class TestClusterPolicyList(TestClusterPolicy):

    def setUp(self):
        super(TestClusterPolicyList, self).setUp()
        self.cmd = osc_cluster_policy.ClusterPolicyList(self.app, None)
        fake_cluster = mock.Mock(id='C1')
        self.mock_client.get_cluster = mock.Mock(return_value=fake_cluster)
        fake_binding = mock.Mock(
            cluster_id="7d85f602-a948-4a30-afd4-e84f47471c15",
            cluster_name="my_cluster",
            is_enabled=True,
            id="06be3a1f-b238-4a96-a737-ceec5714087e",
            policy_id="714fe676-a08f-4196-b7af-61d52eeded15",
            policy_name="my_policy",
            policy_type="senlin.policy.deletion-1.0"
        )
        fake_binding.to_dict = mock.Mock(return_value={})
        self.mock_client.cluster_policies = mock.Mock(
            return_value=[fake_binding])

    def test_cluster_policy_list(self):
        arglist = ['--sort', 'name:asc', '--filter', 'name=my_policy',
                   'my_cluster', '--full-id']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        expected_columns = ['policy_id', 'policy_name', 'policy_type',
                            'is_enabled']

        columns, data = self.cmd.take_action(parsed_args)

        self.mock_client.get_cluster.assert_called_with('my_cluster')
        self.mock_client.cluster_policies.assert_called_with(
            'C1', name="my_policy", sort="name:asc")
        self.assertEqual(expected_columns, columns)


class TestClusterPolicyShow(TestClusterPolicy):

    def setUp(self):
        super(TestClusterPolicyShow, self).setUp()
        self.cmd = osc_cluster_policy.ClusterPolicyShow(self.app, None)
        fake_binding = mock.Mock(
            cluster_id="7d85f602-a948-4a30-afd4-e84f47471c15",
            cluster_name="my_cluster",
            is_enabled=True,
            id="06be3a1f-b238-4a96-a737-ceec5714087e",
            policy_id="714fe676-a08f-4196-b7af-61d52eeded15",
            policy_name="my_policy",
            policy_type="senlin.policy.deletion-1.0"
        )
        fake_binding.to_dict = mock.Mock(return_value={})
        self.mock_client.get_cluster_policy = mock.Mock(
            return_value=fake_binding)

    def test_cluster_policy_show(self):
        arglist = ['--policy', 'my_policy', 'my_cluster']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.cmd.take_action(parsed_args)
        self.mock_client.get_cluster_policy.assert_called_with('my_policy',
                                                               'my_cluster')


class TestClusterPolicyUpdate(TestClusterPolicy):

    def setUp(self):
        super(TestClusterPolicyUpdate, self).setUp()
        self.cmd = osc_cluster_policy.ClusterPolicyUpdate(self.app, None)
        fake_resp = {"action": "8bb476c3-0f4c-44ee-9f64-c7b0260814de"}
        self.mock_client.cluster_update_policy = mock.Mock(
            return_value=fake_resp)

    def test_cluster_policy_update(self):
        arglist = ['--policy', 'my_policy', '--enabled', 'true', 'my_cluster']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.cmd.take_action(parsed_args)
        self.mock_client.cluster_update_policy.assert_called_with(
            'my_cluster', 'my_policy', enabled=True)
@ -1,165 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import copy

import mock
from openstack import exceptions as sdk_exc
from osc_lib import exceptions as exc

from senlinclient.tests.unit.v1 import fakes
from senlinclient.v1 import event as osc_event


class TestEvent(fakes.TestClusteringv1):
    def setUp(self):
        super(TestEvent, self).setUp()
        self.mock_client = self.app.client_manager.clustering


class TestEventList(TestEvent):

    columns = ['id', 'generated_at', 'obj_type', 'obj_id', 'obj_name',
               'action', 'status', 'level', 'cluster_id']
    defaults = {
        'global_project': False,
        'marker': None,
        'limit': None,
        'sort': None,
    }

    def setUp(self):
        super(TestEventList, self).setUp()
        self.cmd = osc_event.ListEvent(self.app, None)
        fake_event = mock.Mock(
            action="CREATE",
            cluster_id=None,
            id="2d255b9c-8f36-41a2-a137-c0175ccc29c3",
            level="20",
            obj_id="0df0931b-e251-4f2e-8719-4ebfda3627ba",
            obj_name="node009",
            obj_type="NODE",
            project_id="6e18cc2bdbeb48a5b3cad2dc499f6804",
            status="CREATING",
            generated_at="2015-03-05T08:53:15",
            user_id="a21ded6060534d99840658a777c2af5a"
        )
        fake_event.to_dict = mock.Mock(return_value={})
        self.mock_client.events = mock.Mock(return_value=[fake_event])

    def test_event_list_defaults(self):
        arglist = []
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.events.assert_called_with(**self.defaults)
        self.assertEqual(self.columns, columns)

    def test_event_list_full_id(self):
        arglist = ['--full-id']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.events.assert_called_with(**self.defaults)
        self.assertEqual(self.columns, columns)

    def test_event_list_limit(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['limit'] = '3'
        arglist = ['--limit', '3']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.events.assert_called_with(**kwargs)
        self.assertEqual(self.columns, columns)

    def test_event_list_sort(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['sort'] = 'name:asc'
        arglist = ['--sort', 'name:asc']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.events.assert_called_with(**kwargs)
        self.assertEqual(self.columns, columns)

    def test_event_list_sort_invalid_key(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['sort'] = 'bad_key'
        arglist = ['--sort', 'bad_key']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.mock_client.events.side_effect = sdk_exc.HttpException()
        self.assertRaises(sdk_exc.HttpException,
                          self.cmd.take_action, parsed_args)

    def test_event_list_sort_invalid_direction(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['sort'] = 'name:bad_direction'
        arglist = ['--sort', 'name:bad_direction']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.mock_client.events.side_effect = sdk_exc.HttpException()
        self.assertRaises(sdk_exc.HttpException,
                          self.cmd.take_action, parsed_args)

    def test_event_list_filter(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['name'] = 'my_event'
        arglist = ['--filter', 'name=my_event']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.events.assert_called_with(**kwargs)
        self.assertEqual(self.columns, columns)

    def test_event_list_marker(self):
        kwargs = copy.deepcopy(self.defaults)
        kwargs['marker'] = 'a9448bf6'
        arglist = ['--marker', 'a9448bf6']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.events.assert_called_with(**kwargs)
        self.assertEqual(self.columns, columns)


class TestEventShow(TestEvent):

    def setUp(self):
        super(TestEventShow, self).setUp()
        self.cmd = osc_event.ShowEvent(self.app, None)
        fake_event = mock.Mock(
            action="CREATE",
            cluster_id=None,
            id="2d255b9c-8f36-41a2-a137-c0175ccc29c3",
            level="20",
            obj_id="0df0931b-e251-4f2e-8719-4ebfda3627ba",
            obj_name="node009",
            obj_type="NODE",
            project_id="6e18cc2bdbeb48a5b3cad2dc499f6804",
            status="CREATING",
            generated_at="2015-03-05T08:53:15",
            user_id="a21ded6060534d99840658a777c2af5a"
        )
        fake_event.to_dict = mock.Mock(return_value={})
        self.mock_client.get_event = mock.Mock(return_value=fake_event)

    def test_event_show(self):
        arglist = ['my_event']
        parsed_args = self.check_parser(self.cmd, arglist, [])

        self.cmd.take_action(parsed_args)

        self.mock_client.get_event.assert_called_with('my_event')

    def test_event_show_not_found(self):
        arglist = ['my_event']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.mock_client.get_event.side_effect = sdk_exc.ResourceNotFound()

        error = self.assertRaises(exc.CommandError, self.cmd.take_action,
                                  parsed_args)

        self.assertEqual('Event not found: my_event', str(error))
@ -1,458 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
|
||||
import mock
|
||||
from openstack import exceptions as sdk_exc
|
||||
from osc_lib import exceptions as exc
|
||||
import six
|
||||
|
||||
from senlinclient.tests.unit.v1 import fakes
|
||||
from senlinclient.v1 import node as osc_node
|
||||
|
||||
|
||||
class TestNode(fakes.TestClusteringv1):
|
||||
def setUp(self):
|
||||
super(TestNode, self).setUp()
|
||||
self.mock_client = self.app.client_manager.clustering
|
||||
|
||||
|
||||
class TestNodeList(TestNode):
|
||||
|
||||
columns = ['id', 'name', 'index', 'status', 'cluster_id',
|
||||
'physical_id', 'profile_name', 'created_at', 'updated_at']
|
||||
|
||||
defaults = {
|
||||
'cluster_id': None,
|
||||
'global_project': False,
|
||||
'marker': None,
|
||||
'limit': None,
|
||||
'sort': None,
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestNodeList, self).setUp()
|
||||
self.cmd = osc_node.ListNode(self.app, None)
|
||||
fake_node = mock.Mock(
|
||||
cluster_id=None,
|
||||
created_at="2015-02-27T04:39:21",
|
||||
data={},
|
||||
details={},
|
||||
domain=None,
|
||||
id="573aa1ba-bf45-49fd-907d-6b5d6e6adfd3",
|
||||
index=-1,
|
||||
init_at="2015-02-27T04:39:18",
|
||||
metadata={},
|
||||
physical_id="cc028275-d078-4729-bf3e-154b7359814b",
|
||||
profile_id="edc63d0a-2ca4-48fa-9854-27926da76a4a",
|
||||
profile_name="mystack",
|
||||
project_id="6e18cc2bdbeb48a5b3cad2dc499f6804",
|
||||
role=None,
|
||||
status="ACTIVE",
|
||||
status_reason="Creation succeeded",
|
||||
updated_at=None,
|
||||
user_id="5e5bf8027826429c96af157f68dc9072"
|
||||
)
|
||||
fake_node.name = "node00a"
|
||||
fake_node.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.nodes = mock.Mock(return_value=[fake_node])
|
||||
|
||||
def test_node_list_defaults(self):
|
||||
arglist = []
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.nodes.assert_called_with(**self.defaults)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_node_list_full_id(self):
|
||||
arglist = ['--full-id']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.nodes.assert_called_with(**self.defaults)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_node_list_limit(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['limit'] = '3'
|
||||
arglist = ['--limit', '3']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.nodes.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_node_list_sort(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'name:asc'
|
||||
arglist = ['--sort', 'name:asc']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.nodes.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_node_list_sort_invalid_key(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'bad_key'
|
||||
arglist = ['--sort', 'bad_key']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.nodes.side_effect = sdk_exc.HttpException()
|
||||
self.assertRaises(sdk_exc.HttpException,
|
||||
self.cmd.take_action, parsed_args)
|
||||
|
||||
def test_node_list_sort_invalid_direction(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'name:bad_direction'
|
||||
arglist = ['--sort', 'name:bad_direction']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.nodes.side_effect = sdk_exc.HttpException()
|
||||
self.assertRaises(sdk_exc.HttpException,
|
||||
self.cmd.take_action, parsed_args)
|
||||
|
||||
def test_node_list_filter(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['name'] = 'my_node'
|
||||
arglist = ['--filter', 'name=my_node']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.nodes.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_node_list_marker(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['marker'] = 'a9448bf6'
|
||||
arglist = ['--marker', 'a9448bf6']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.nodes.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
|
||||
class TestNodeShow(TestNode):
|
||||
response = {"node": {
|
||||
}}
|
||||
|
||||
def setUp(self):
|
||||
super(TestNodeShow, self).setUp()
|
||||
self.cmd = osc_node.ShowNode(self.app, None)
|
||||
fake_node = mock.Mock(
|
||||
cluster_id=None,
|
||||
created_at="2015-02-10T12:03:16",
|
||||
data={},
|
||||
details={"OS-DCF:diskConfig": "MANUAL"},
|
||||
domain_id=None,
|
||||
id="d5779bb0-f0a0-49c9-88cc-6f078adb5a0b",
|
||||
index=-1,
|
||||
init_at="2015-02-10T12:03:13",
|
||||
metadata={},
|
||||
physical_id="f41537fa-22ab-4bea-94c0-c874e19d0c80",
|
||||
profile_id="edc63d0a-2ca4-48fa-9854-27926da76a4a",
|
||||
profile_name="mystack",
|
||||
project_id="6e18cc2bdbeb48a5b3cad2dc499f6804",
|
||||
role=None,
|
||||
status="ACTIVE",
|
||||
status_reason="Creation succeeded",
|
||||
updated_at="2015-03-04T04:58:27",
|
||||
user_id="5e5bf8027826429c96af157f68dc9072"
|
||||
)
|
||||
fake_node.name = "my_node"
|
||||
fake_node.to_dict = mock.Mock(
|
||||
return_value={'details': {'key': 'value'}}
|
||||
)
|
||||
self.mock_client.get_node = mock.Mock(return_value=fake_node)
|
||||
|
||||
def test_node_show(self):
|
||||
arglist = ['my_node']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.get_node.assert_called_with('my_node', details=False)
|
||||
|
||||
def test_node_show_with_details(self):
|
||||
arglist = ['my_node', '--details']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.mock_client.get_node.assert_called_with(
|
||||
'my_node', details=True)
|
||||
|
||||
def test_node_show_not_found(self):
|
||||
arglist = ['my_node']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.get_node.side_effect = sdk_exc.ResourceNotFound()
|
||||
error = self.assertRaises(exc.CommandError, self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertEqual('Node not found: my_node', str(error))
|
||||
|
||||
|
||||
class TestNodeCreate(TestNode):
|
||||
|
||||
defaults = {
|
||||
"cluster_id": None,
|
||||
"metadata": {},
|
||||
"name": "my_node",
|
||||
"profile_id": "mystack",
|
||||
"role": None
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestNodeCreate, self).setUp()
|
||||
self.cmd = osc_node.CreateNode(self.app, None)
|
||||
fake_node = mock.Mock(
|
||||
action="2366d440-c73e-4961-9254-6d1c3af7c167",
|
||||
cluster_id="",
|
||||
created_at=None,
|
||||
data={},
|
||||
domain=None,
|
||||
id="0df0931b-e251-4f2e-8719-4ebfda3627ba",
|
||||
index=-1,
|
||||
init_time="2015-03-05T08:53:15",
|
||||
metadata={},
|
||||
physical_id=None,
|
||||
profile_id="edc63d0a-2ca4-48fa-9854-27926da76a4a",
|
||||
profile_name="mystack",
|
||||
project_id="6e18cc2bdbeb48a5b3cad2dc499f6804",
|
||||
role="master",
|
||||
status="INIT",
|
||||
status_reason="Initializing",
|
||||
updated_at=None,
|
||||
user_id="5e5bf8027826429c96af157f68dc9072"
|
||||
)
|
||||
fake_node.name = "my_node"
|
||||
fake_node.to_dict = mock.Mock(return_value={})
|
||||
|
||||
self.mock_client.create_node = mock.Mock(return_value=fake_node)
|
||||
self.mock_client.get_node = mock.Mock(return_value=fake_node)
|
||||
|
||||
def test_node_create_defaults(self):
|
||||
arglist = ['my_node', '--profile', 'mystack']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.create_node.assert_called_with(**self.defaults)
|
||||
|
||||
def test_node_create_with_metadata(self):
|
||||
arglist = ['my_node', '--profile', 'mystack',
|
||||
'--metadata', 'key1=value1;key2=value2']
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['metadata'] = {'key1': 'value1', 'key2': 'value2'}
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.create_node.assert_called_with(**kwargs)
|
||||
|
||||
def test_node_create_with_cluster(self):
|
||||
arglist = ['my_node', '--profile', 'mystack',
|
||||
'--cluster', 'mycluster']
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['cluster_id'] = 'mycluster'
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.create_node.assert_called_with(**kwargs)
|
||||
|
||||
def test_node_create_with_role(self):
|
||||
arglist = ['my_node', '--profile', 'mystack',
|
||||
'--role', 'master']
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['role'] = 'master'
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.create_node.assert_called_with(**kwargs)
|
||||
|
||||
|
||||
class TestNodeUpdate(TestNode):
|
||||
|
||||
defaults = {
|
||||
"name": "new_node",
|
||||
"metadata": {
|
||||
"nk1": "nv1",
|
||||
"nk2": "nv2",
|
||||
},
|
||||
"profile_id": "new_profile",
|
||||
"role": "new_role"
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestNodeUpdate, self).setUp()
|
||||
self.cmd = osc_node.UpdateNode(self.app, None)
|
||||
fake_node = mock.Mock(
|
||||
action="2366d440-c73e-4961-9254-6d1c3af7c167",
|
||||
cluster_id="",
|
||||
created_at=None,
|
||||
data={},
|
||||
domain=None,
|
||||
id="0df0931b-e251-4f2e-8719-4ebfda3627ba",
|
||||
index=-1,
|
||||
init_time="2015-03-05T08:53:15",
|
||||
metadata={},
|
||||
physical_id=None,
|
||||
profile_id="edc63d0a-2ca4-48fa-9854-27926da76a4a",
|
||||
profile_name="mystack",
|
||||
project_id="6e18cc2bdbeb48a5b3cad2dc499f6804",
|
||||
role="master",
|
||||
status="INIT",
|
||||
status_reason="Initializing",
|
||||
updated_at=None,
|
||||
user_id="5e5bf8027826429c96af157f68dc9072"
|
||||
)
|
||||
fake_node.name = "my_node"
|
||||
fake_node.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.update_node = mock.Mock(return_value=fake_node)
|
||||
self.mock_client.get_node = mock.Mock(return_value=fake_node)
|
||||
self.mock_client.find_node = mock.Mock(return_value=fake_node)
|
||||
|
||||
def test_node_update_defaults(self):
|
||||
arglist = ['--name', 'new_node', '--metadata', 'nk1=nv1;nk2=nv2',
|
||||
'--profile', 'new_profile', '--role', 'new_role',
|
||||
'0df0931b']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.update_node.assert_called_with(
|
||||
'0df0931b-e251-4f2e-8719-4ebfda3627ba', **self.defaults)
|
||||
|
||||
def test_node_update_not_found(self):
|
||||
arglist = ['--name', 'new_node', '--metadata', 'nk1=nv1;nk2=nv2',
|
||||
'--profile', 'new_profile', '--role', 'new_role',
|
||||
'c6b8b252']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.find_node.return_value = None
|
||||
error = self.assertRaises(exc.CommandError, self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertIn('Node not found: c6b8b252', str(error))
|
||||
|
||||
|
||||
class TestNodeDelete(TestNode):
|
||||
def setUp(self):
|
||||
super(TestNodeDelete, self).setUp()
|
||||
self.cmd = osc_node.DeleteNode(self.app, None)
|
||||
mock_node = mock.Mock(location='loc/fake_action_id')
|
||||
self.mock_client.delete_node = mock.Mock(return_value=mock_node)
|
||||
|
||||
def test_node_delete(self):
|
||||
arglist = ['node1', 'node2', 'node3']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.delete_node.assert_has_calls(
|
||||
[mock.call('node1', False), mock.call('node2', False),
|
||||
mock.call('node3', False)]
|
||||
)
|
||||
|
||||
def test_node_delete_force(self):
|
||||
arglist = ['node1', 'node2', 'node3', '--force']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.delete_node.assert_has_calls(
|
||||
[mock.call('node1', False), mock.call('node2', False),
|
||||
mock.call('node3', False)]
|
||||
)
|
||||
|
||||
def test_node_delete_not_found(self):
|
||||
arglist = ['my_node']
|
||||
self.mock_client.delete_node.side_effect = sdk_exc.ResourceNotFound
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.mock_client.delete_node.assert_has_calls(
|
||||
[mock.call('my_node', False)]
|
||||
)
|
||||
|
||||
def test_node_delete_one_found_one_not_found(self):
|
||||
arglist = ['node1', 'node2']
|
||||
self.mock_client.delete_node.side_effect = (
|
||||
[None, sdk_exc.ResourceNotFound]
|
||||
)
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.mock_client.delete_node.assert_has_calls(
|
||||
[mock.call('node1', False), mock.call('node2', False)]
|
||||
)
|
||||
|
||||
@mock.patch('sys.stdin', spec=six.StringIO)
|
||||
def test_node_delete_prompt_yes(self, mock_stdin):
|
||||
arglist = ['my_node']
|
||||
mock_stdin.isatty.return_value = True
|
||||
mock_stdin.readline.return_value = 'y'
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
mock_stdin.readline.assert_called_with()
|
||||
self.mock_client.delete_node.assert_called_with('my_node',
|
||||
False)
|
||||
|
||||
@mock.patch('sys.stdin', spec=six.StringIO)
|
||||
def test_node_delete_prompt_no(self, mock_stdin):
|
||||
arglist = ['my_node']
|
||||
mock_stdin.isatty.return_value = True
|
||||
mock_stdin.readline.return_value = 'n'
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
mock_stdin.readline.assert_called_with()
|
||||
self.mock_client.delete_node.assert_not_called()
|
||||
|
||||
|
||||
class TestNodeCheck(TestNode):
|
||||
response = {"action": "8bb476c3-0f4c-44ee-9f64-c7b0260814de"}
|
||||
|
||||
def setUp(self):
|
||||
super(TestNodeCheck, self).setUp()
|
||||
self.cmd = osc_node.CheckNode(self.app, None)
|
||||
self.mock_client.check_node = mock.Mock(
|
||||
return_value=self.response)
|
||||
|
||||
def test_node_check(self):
|
||||
arglist = ['node1', 'node2', 'node3']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.check_node.assert_has_calls(
|
||||
[mock.call('node1'), mock.call('node2'),
|
||||
mock.call('node3')]
|
||||
)
|
||||
|
||||
def test_node_check_not_found(self):
|
||||
arglist = ['node1']
|
||||
self.mock_client.check_node.side_effect = sdk_exc.ResourceNotFound
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
error = self.assertRaises(exc.CommandError, self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertIn('Node not found: node1', str(error))
|
||||
|
||||
|
||||
class TestNodeRecover(TestNode):
|
||||
|
||||
action_id = "8bb476c3-0f4c-44ee-9f64-c7b0260814de"
|
||||
|
||||
def setUp(self):
|
||||
super(TestNodeRecover, self).setUp()
|
||||
self.cmd = osc_node.RecoverNode(self.app, None)
|
||||
fake_res = {'action': self.action_id}
|
||||
self.mock_client.recover_node = mock.Mock(return_value=fake_res)
|
||||
|
||||
def test_node_recover(self):
|
||||
arglist = ['node1', 'node2', 'node3', '--check', 'false']
|
||||
kwargs = {'check': False}
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.recover_node.assert_has_calls(
|
||||
[mock.call('node1', **kwargs), mock.call('node2', **kwargs),
|
||||
mock.call('node3', **kwargs)]
|
||||
)
|
||||
|
||||
def test_node_recover_not_found(self):
|
||||
arglist = ['node1']
|
||||
self.mock_client.recover_node.side_effect = sdk_exc.ResourceNotFound
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
error = self.assertRaises(exc.CommandError, self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertIn('Node not found: node1', str(error))
|
@ -1,441 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
|
||||
import mock
|
||||
from openstack import exceptions as sdk_exc
|
||||
from osc_lib import exceptions as exc
|
||||
import six
|
||||
|
||||
from senlinclient.tests.unit.v1 import fakes
|
||||
from senlinclient.v1 import policy as osc_policy
|
||||
|
||||
|
||||
class TestPolicy(fakes.TestClusteringv1):
|
||||
def setUp(self):
|
||||
super(TestPolicy, self).setUp()
|
||||
self.mock_client = self.app.client_manager.clustering
|
||||
|
||||
|
||||
class TestPolicyList(TestPolicy):
|
||||
columns = ['id', 'name', 'type', 'created_at']
|
||||
response = {"policies": [
|
||||
{
|
||||
}
|
||||
]}
|
||||
defaults = {
|
||||
'global_project': False,
|
||||
'marker': None,
|
||||
'limit': None,
|
||||
'sort': None,
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestPolicyList, self).setUp()
|
||||
self.cmd = osc_policy.ListPolicy(self.app, None)
|
||||
fake_policy = mock.Mock(
|
||||
created_at="2015-02-15T08:33:13.000000",
|
||||
data={},
|
||||
domain=None,
|
||||
id="7192d8df-73be-4e98-ab99-1cf6d5066729",
|
||||
project_id="42d9e9663331431f97b75e25136307ff",
|
||||
spec={
|
||||
"description": "A test policy",
|
||||
"properties": {
|
||||
"criteria": "OLDEST_FIRST",
|
||||
"destroy_after_deletion": True,
|
||||
"grace_period": 60,
|
||||
"reduce_desired_capacity": False
|
||||
},
|
||||
"type": "senlin.policy.deletion",
|
||||
"version": "1.0"
|
||||
},
|
||||
type="senlin.policy.deletion-1.0",
|
||||
updated_at=None,
|
||||
user_id="5e5bf8027826429c96af157f68dc9072"
|
||||
)
|
||||
fake_policy.name = "test_policy_1"
|
||||
fake_policy.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.policies = mock.Mock(
|
||||
return_value=self.response)
|
||||
|
||||
def test_policy_list_defaults(self):
|
||||
arglist = []
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.policies.assert_called_with(**self.defaults)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_policy_list_full_id(self):
|
||||
arglist = ['--full-id']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.policies.assert_called_with(**self.defaults)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_policy_list_limit(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['limit'] = '3'
|
||||
arglist = ['--limit', '3']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.policies.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_policy_list_sort(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'name:asc'
|
||||
arglist = ['--sort', 'name:asc']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.policies.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_policy_list_sort_invalid_key(self):
|
||||
self.mock_client.policies = mock.Mock(
|
||||
return_value=self.response)
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'bad_key'
|
||||
arglist = ['--sort', 'bad_key']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.policies.side_effect = sdk_exc.HttpException()
|
||||
self.assertRaises(sdk_exc.HttpException,
|
||||
self.cmd.take_action, parsed_args)
|
||||
|
||||
def test_policy_list_sort_invalid_direction(self):
|
||||
self.mock_client.policies = mock.Mock(
|
||||
return_value=self.response)
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'name:bad_direction'
|
||||
arglist = ['--sort', 'name:bad_direction']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.policies.side_effect = sdk_exc.HttpException()
|
||||
self.assertRaises(sdk_exc.HttpException,
|
||||
self.cmd.take_action, parsed_args)
|
||||
|
||||
def test_policy_list_marker(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['marker'] = 'a9448bf6'
|
||||
arglist = ['--marker', 'a9448bf6']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.policies.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_policy_list_filter(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['name'] = 'my_policy'
|
||||
arglist = ['--filter', 'name=my_policy']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.policies.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
|
||||
class TestPolicyShow(TestPolicy):
|
||||
response = {"policy": {
|
||||
}}
|
||||
|
||||
def setUp(self):
|
||||
super(TestPolicyShow, self).setUp()
|
||||
self.cmd = osc_policy.ShowPolicy(self.app, None)
|
||||
fake_policy = mock.Mock(
|
||||
created_at="2015-03-02T07:40:31",
|
||||
data={},
|
||||
domain_id=None,
|
||||
id="02f62195-2198-4797-b0a9-877632208527",
|
||||
project_id="42d9e9663331431f97b75e25136307ff",
|
||||
spec={
|
||||
"properties": {
|
||||
"adjustment": {
|
||||
"best_effort": True,
|
||||
"min_step": 1,
|
||||
"number": 1,
|
||||
"type": "CHANGE_IN_CAPACITY"
|
||||
},
|
||||
"event": "CLUSTER_SCALE_IN"
|
||||
},
|
||||
"type": "senlin.policy.scaling",
|
||||
"version": "1.0"
|
||||
},
|
||||
type="senlin.policy.scaling-1.0",
|
||||
updated_at=None,
|
||||
user_id="5e5bf8027826429c96af157f68dc9072"
|
||||
)
|
||||
fake_policy.name = "sp001"
|
||||
fake_policy.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.get_policy = mock.Mock(return_value=fake_policy)
|
||||
|
||||
def test_policy_show(self):
|
||||
arglist = ['sp001']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.mock_client.get_policy.assert_called_with('sp001')
|
||||
policy = self.mock_client.get_policy('sp001')
|
||||
self.assertEqual("2015-03-02T07:40:31", policy.created_at)
|
||||
self.assertEqual({}, policy.data)
|
||||
self.assertEqual("02f62195-2198-4797-b0a9-877632208527", policy.id)
|
||||
self.assertEqual("sp001", policy.name)
|
||||
self.assertEqual("42d9e9663331431f97b75e25136307ff", policy.project_id)
|
||||
self.assertEqual("senlin.policy.scaling-1.0", policy.type)
|
||||
self.assertIsNone(policy.updated_at)
|
||||
|
||||
def test_policy_show_not_found(self):
|
||||
arglist = ['sp001']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.get_policy.side_effect = sdk_exc.ResourceNotFound()
|
||||
self.assertRaises(exc.CommandError, self.cmd.take_action, parsed_args)
|
||||
|
||||
|
||||
class TestPolicyCreate(TestPolicy):
|
||||
spec_path = 'senlinclient/tests/test_specs/deletion_policy.yaml'
|
||||
defaults = {
|
||||
"name": "my_policy",
|
||||
"spec": {
|
||||
"version": 1,
|
||||
"type": "senlin.policy.deletion",
|
||||
"description": "A policy for choosing victim node(s) from a "
|
||||
"cluster for deletion.",
|
||||
"properties": {
|
||||
"destroy_after_deletion": True,
|
||||
"grace_period": 60,
|
||||
"reduce_desired_capacity": False,
|
||||
"criteria": "OLDEST_FIRST"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestPolicyCreate, self).setUp()
|
||||
self.cmd = osc_policy.CreatePolicy(self.app, None)
|
||||
fake_policy = mock.Mock(
|
||||
created_at="2016-02-21T02:38:36",
|
||||
data={},
|
||||
domain_id=None,
|
||||
id="9f779ddf-744e-48bd-954c-acef7e11116c",
|
||||
project_id="5f1cc92b578e4e25a3b284179cf20a9b",
|
||||
spec={
|
||||
"description": "A policy for choosing victim node(s) from a "
|
||||
"cluster for deletion.",
|
||||
"properties": {
|
||||
"criteria": "OLDEST_FIRST",
|
||||
"destroy_after_deletion": True,
|
||||
"grace_period": 60,
|
||||
"reduce_desired_capacity": False
|
||||
},
|
||||
"type": "senlin.policy.deletion",
|
||||
"version": 1.0
|
||||
},
|
||||
type="senlin.policy.deletion-1.0",
|
||||
updated_at=None,
|
||||
user_id="2d7aca950f3e465d8ef0c81720faf6ff"
|
||||
)
|
||||
fake_policy.name = "my_policy"
|
||||
fake_policy.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.create_policy = mock.Mock(return_value=fake_policy)
|
||||
self.mock_client.get_policy = mock.Mock(return_value=fake_policy)
|
||||
|
||||
def test_policy_create_defaults(self):
|
||||
arglist = ['my_policy', '--spec-file', self.spec_path]
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.create_policy.assert_called_with(**self.defaults)
|
||||
|
||||
|
||||
class TestPolicyUpdate(TestPolicy):
|
||||
|
||||
def setUp(self):
|
||||
super(TestPolicyUpdate, self).setUp()
|
||||
self.cmd = osc_policy.UpdatePolicy(self.app, None)
|
||||
fake_policy = mock.Mock(
|
||||
created_at="2016-02-21T02:38:36",
|
||||
data={},
|
||||
domain_id=None,
|
||||
id="9f779ddf-744e-48bd-954c-acef7e11116c",
|
||||
project_id="5f1cc92b578e4e25a3b284179cf20a9b",
|
||||
spec={
|
||||
"description": "A policy for choosing victim node(s) from a "
|
||||
"cluster for deletion.",
|
||||
"properties": {
|
||||
"criteria": "OLDEST_FIRST",
|
||||
"destroy_after_deletion": True,
|
||||
"grace_period": 60,
|
||||
"reduce_desired_capacity": False
|
||||
},
|
||||
"type": "senlin.policy.deletion",
|
||||
"version": 1.0
|
||||
},
|
||||
type="senlin.policy.deletion-1.0",
|
||||
updated_at=None,
|
||||
user_id="2d7aca950f3e465d8ef0c81720faf6ff"
|
||||
)
|
||||
fake_policy.name = "new_policy"
|
||||
fake_policy.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.update_policy = mock.Mock(return_value=fake_policy)
|
||||
self.mock_client.get_policy = mock.Mock(return_value=fake_policy)
|
||||
self.mock_client.find_policy = mock.Mock(return_value=fake_policy)
|
||||
|
||||
def test_policy_update_defaults(self):
|
||||
arglist = ['--name', 'new_policy', '9f779ddf']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.mock_client.update_policy.assert_called_with(
|
||||
'9f779ddf-744e-48bd-954c-acef7e11116c', name="new_policy")
|
||||
|
||||
def test_policy_update_not_found(self):
|
||||
arglist = ['--name', 'new_policy', 'c6b8b252']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.find_policy.return_value = None
|
||||
error = self.assertRaises(exc.CommandError,
|
||||
self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertIn('Policy not found: c6b8b252', str(error))
|
||||
|
||||
|
||||
class TestPolicyDelete(TestPolicy):
|
||||
def setUp(self):
|
||||
super(TestPolicyDelete, self).setUp()
|
||||
self.cmd = osc_policy.DeletePolicy(self.app, None)
|
||||
self.mock_client.delete_policy = mock.Mock()
|
||||
|
||||
def test_policy_delete(self):
|
||||
arglist = ['policy1', 'policy2', 'policy3']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.delete_policy.assert_has_calls(
|
||||
[mock.call('policy1', False), mock.call('policy2', False),
|
||||
mock.call('policy3', False)]
|
||||
)
|
||||
|
||||
def test_policy_delete_force(self):
|
||||
arglist = ['policy1', 'policy2', 'policy3', '--force']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.delete_policy.assert_has_calls(
|
||||
[mock.call('policy1', False), mock.call('policy2', False),
|
||||
mock.call('policy3', False)]
|
||||
)
|
||||
|
||||
def test_policy_delete_not_found(self):
|
||||
arglist = ['my_policy']
|
||||
self.mock_client.delete_policy.side_effect = sdk_exc.ResourceNotFound
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
error = self.assertRaises(exc.CommandError, self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertIn('Failed to delete 1 of the 1 specified policy(s).',
|
||||
str(error))
|
||||
|
||||
def test_policy_delete_one_found_one_not_found(self):
|
||||
arglist = ['policy1', 'policy2']
|
||||
self.mock_client.delete_policy.side_effect = (
|
||||
[None, sdk_exc.ResourceNotFound]
|
||||
)
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
error = self.assertRaises(exc.CommandError,
|
||||
self.cmd.take_action, parsed_args)
|
||||
self.mock_client.delete_policy.assert_has_calls(
|
||||
[mock.call('policy1', False), mock.call('policy2', False)]
|
||||
)
|
||||
self.assertEqual('Failed to delete 1 of the 2 specified policy(s).',
|
||||
str(error))
|
||||
|
||||
@mock.patch('sys.stdin', spec=six.StringIO)
|
||||
def test_policy_delete_prompt_yes(self, mock_stdin):
|
||||
arglist = ['my_policy']
|
||||
mock_stdin.isatty.return_value = True
|
||||
mock_stdin.readline.return_value = 'y'
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
mock_stdin.readline.assert_called_with()
|
||||
self.mock_client.delete_policy.assert_called_with('my_policy',
|
||||
False)
|
||||
|
||||
@mock.patch('sys.stdin', spec=six.StringIO)
|
||||
def test_policy_delete_prompt_no(self, mock_stdin):
|
||||
arglist = ['my_policy']
|
||||
mock_stdin.isatty.return_value = True
|
||||
mock_stdin.readline.return_value = 'n'
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
mock_stdin.readline.assert_called_with()
|
||||
self.mock_client.delete_policy.assert_not_called()
|
||||
|
||||
|
||||
class TestPolicyValidate(TestPolicy):
|
||||
spec_path = 'senlinclient/tests/test_specs/deletion_policy.yaml'
|
||||
defaults = {
|
||||
"spec": {
|
||||
"version": 1,
|
||||
"type": "senlin.policy.deletion",
|
||||
"description": "A policy for choosing victim node(s) from a "
|
||||
"cluster for deletion.",
|
||||
"properties": {
|
||||
"destroy_after_deletion": True,
|
||||
"grace_period": 60,
|
||||
"reduce_desired_capacity": False,
|
||||
"criteria": "OLDEST_FIRST"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestPolicyValidate, self).setUp()
|
||||
self.cmd = osc_policy.ValidatePolicy(self.app, None)
|
||||
fake_policy = mock.Mock(
|
||||
created_at=None,
|
||||
data={},
|
||||
domain_id=None,
|
||||
id=None,
|
||||
project_id="5f1cc92b578e4e25a3b284179cf20a9b",
|
||||
spec={
|
||||
"description": "A policy for choosing victim node(s) from a "
|
||||
"cluster for deletion.",
|
||||
"properties": {
|
||||
"criteria": "OLDEST_FIRST",
|
||||
"destroy_after_deletion": True,
|
||||
"grace_period": 60,
|
||||
"reduce_desired_capacity": False
|
||||
},
|
||||
"type": "senlin.policy.deletion",
|
||||
"version": 1.0
|
||||
},
|
||||
type="senlin.policy.deletion-1.0",
|
||||
updated_at=None,
|
||||
user_id="2d7aca950f3e465d8ef0c81720faf6ff"
|
||||
)
|
||||
fake_policy.name = "validated_policy"
|
||||
fake_policy.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.validate_policy = mock.Mock(return_value=fake_policy)
|
||||
|
||||
def test_policy_validate(self):
|
||||
arglist = ['--spec-file', self.spec_path]
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.validate_policy.assert_called_with(**self.defaults)
|
||||
|
||||
policy = self.mock_client.validate_policy(**self.defaults)
|
||||
|
||||
self.assertEqual("5f1cc92b578e4e25a3b284179cf20a9b", policy.project_id)
|
||||
self.assertEqual({}, policy.data)
|
||||
self.assertIsNone(policy.id)
|
||||
self.assertEqual("validated_policy", policy.name)
|
||||
self.assertEqual("senlin.policy.deletion-1.0", policy.type)
|
||||
self.assertIsNone(policy.updated_at)
|
@ -1,89 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
from openstack import exceptions as sdk_exc
from osc_lib import exceptions as exc

from senlinclient.tests.unit.v1 import fakes
from senlinclient.v1 import policy_type as osc_policy_type


class TestPolicyType(fakes.TestClusteringv1):
    def setUp(self):
        super(TestPolicyType, self).setUp()
        self.mock_client = self.app.client_manager.clustering


class TestPolicyTypeList(TestPolicyType):
    expected_columns = ['name', 'version', 'support_status']
    pt1 = mock.Mock(
        schema={'foo': 'bar'},
        support_status={
            "1.0": [{"status": "SUPPORTED", "since": "2016.10"}]
        }
    )
    pt1.name = 'BBB'
    pt2 = mock.Mock(
        schema={'foo': 'bar'},
        support_status={
            "1.0": [{"status": "DEPRECATED", "since": "2016.01"}]
        }
    )
    pt2.name = 'AAA'
    list_response = [pt1, pt2]
    expected_rows = [
        ('AAA', '1.0', 'DEPRECATED since 2016.01'),
        ('BBB', '1.0', 'SUPPORTED since 2016.10')
    ]

    def setUp(self):
        super(TestPolicyTypeList, self).setUp()
        self.cmd = osc_policy_type.PolicyTypeList(self.app, None)
        self.mock_client.policy_types = mock.Mock(
            return_value=self.list_response)

    def test_policy_type_list(self):
        arglist = []
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, rows = self.cmd.take_action(parsed_args)

        self.mock_client.policy_types.assert_called_with()
        self.assertEqual(self.expected_columns, columns)
        self.assertEqual(self.expected_rows, rows)


class TestPolicyTypeShow(TestPolicyType):

    def setUp(self):
        super(TestPolicyTypeShow, self).setUp()
        self.cmd = osc_policy_type.PolicyTypeShow(self.app, None)
        fake_pt = mock.Mock(schema={'foo': 'bar'})
        fake_pt.name = 'senlin.policy.deletion-1.0'
        self.mock_client.get_policy_type = mock.Mock(return_value=fake_pt)

    def test_policy_type_show(self):
        arglist = ['os.heat.stack-1.0']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.cmd.take_action(parsed_args)
        self.mock_client.get_policy_type.assert_called_once_with(
            'os.heat.stack-1.0')

    def test_policy_type_show_not_found(self):
        arglist = ['senlin.policy.deletion-1.0']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.mock_client.get_policy_type.side_effect = (
            sdk_exc.ResourceNotFound())
        error = self.assertRaises(exc.CommandError, self.cmd.take_action,
                                  parsed_args)
        self.assertEqual('Policy Type not found: senlin.policy.deletion-1.0',
                         str(error))
@ -1,449 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
|
||||
import mock
|
||||
from openstack import exceptions as sdk_exc
|
||||
from osc_lib import exceptions as exc
|
||||
from osc_lib import utils
|
||||
import six
|
||||
|
||||
from senlinclient.tests.unit.v1 import fakes
|
||||
from senlinclient.v1 import profile as osc_profile
|
||||
|
||||
|
||||
class TestProfile(fakes.TestClusteringv1):
|
||||
def setUp(self):
|
||||
super(TestProfile, self).setUp()
|
||||
self.mock_client = self.app.client_manager.clustering
|
||||
|
||||
|
||||
class TestProfileShow(TestProfile):
|
||||
response = {"profile": {
|
||||
"created_at": "2015-03-01T14:28:25",
|
||||
"domain": 'false',
|
||||
"id": "7fa885cd-fa39-4531-a42d-780af95c84a4",
|
||||
"metadata": {},
|
||||
"name": "test_prof1",
|
||||
"project": "42d9e9663331431f97b75e25136307ff",
|
||||
"spec": {
|
||||
"disable_rollback": 'false',
|
||||
"environment": {
|
||||
"resource_registry": {
|
||||
"os.heat.server": "OS::Heat::Server"
|
||||
}
|
||||
},
|
||||
"files": {
|
||||
"file:///opt/stack/senlin/examples/profiles/test_script.sh":
|
||||
"#!/bin/bash\n\necho \"this is a test script file\"\n"
|
||||
},
|
||||
"parameters": {},
|
||||
"template": {
|
||||
"heat_template_version": "2014-10-16",
|
||||
"outputs": {
|
||||
"result": {
|
||||
"value": {
|
||||
"get_attr": [
|
||||
"random",
|
||||
"value"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"parameters": {
|
||||
"file": {
|
||||
"default": {
|
||||
"get_file": "file:///opt/stack/senlin/"
|
||||
"examples/profiles/test_script.sh"
|
||||
},
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"resources": {
|
||||
"random": {
|
||||
"properties": {
|
||||
"length": 64
|
||||
},
|
||||
"type": "OS::Heat::RandomString"
|
||||
}
|
||||
},
|
||||
"timeout": 60
|
||||
},
|
||||
"type": "os.heat.stack",
|
||||
"version": "1.0"
|
||||
},
|
||||
"type": "os.heat.stack-1.0",
|
||||
"updated_at": 'null',
|
||||
"user": "5e5bf8027826429c96af157f68dc9072"
|
||||
}}
|
||||
|
||||
def setUp(self):
|
||||
super(TestProfileShow, self).setUp()
|
||||
self.cmd = osc_profile.ShowProfile(self.app, None)
|
||||
fake_profile = mock.Mock(
|
||||
created_at="2015-03-01T14:28:25",
|
||||
domain_id=None,
|
||||
id="7fa885cd-fa39-4531-a42d-780af95c84a4",
|
||||
metadata={},
|
||||
project_id="42d9e9663331431f97b75e25136307ff",
|
||||
spec={"foo": 'bar'},
|
||||
type="os.heat.stack-1.0",
|
||||
updated_at=None,
|
||||
user_id="5e5bf8027826429c96af157f68dc9072"
|
||||
)
|
||||
fake_profile.name = "test_prof1"
|
||||
fake_profile.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.get_profile = mock.Mock(return_value=fake_profile)
|
||||
utils.get_dict_properties = mock.Mock(return_value='')
|
||||
|
||||
def test_profile_show(self):
|
||||
arglist = ['my_profile']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.get_profile.assert_called_with('my_profile')
|
||||
profile = self.mock_client.get_profile('my_profile')
|
||||
self.assertEqual("42d9e9663331431f97b75e25136307ff",
|
||||
profile.project_id)
|
||||
self.assertEqual("7fa885cd-fa39-4531-a42d-780af95c84a4", profile.id)
|
||||
self.assertEqual({}, profile.metadata)
|
||||
self.assertEqual("test_prof1", profile.name)
|
||||
self.assertEqual("os.heat.stack-1.0", profile.type)
|
||||
|
||||
def test_profile_show_not_found(self):
|
||||
arglist = ['my_profile']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.get_profile.side_effect = sdk_exc.ResourceNotFound()
|
||||
self.assertRaises(
|
||||
exc.CommandError,
|
||||
self.cmd.take_action,
|
||||
parsed_args)
|
||||
|
||||
|
||||
class TestProfileList(TestProfile):
|
||||
|
||||
def setUp(self):
|
||||
super(TestProfileList, self).setUp()
|
||||
self.cmd = osc_profile.ListProfile(self.app, None)
|
||||
fake_profile = mock.Mock(
|
||||
created_at="2015-03-01T14:28:25",
|
||||
domain_id=None,
|
||||
id="7fa885cd-fa39-4531-a42d-780af95c84a4",
|
||||
metadata={},
|
||||
project_id="42d9e9663331431f97b75e25136307ff",
|
||||
spec={"foo": 'bar'},
|
||||
type="os.heat.stack-1.0",
|
||||
updated_at=None,
|
||||
user_id="5e5bf8027826429c96af157f68dc9072"
|
||||
)
|
||||
fake_profile.name = "test_profile"
|
||||
fake_profile.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.profiles = mock.Mock(return_value=[fake_profile])
|
||||
self.defaults = {
|
||||
'limit': None,
|
||||
'marker': None,
|
||||
'sort': None,
|
||||
'global_project': False,
|
||||
}
|
||||
self.columns = ['id', 'name', 'type', 'created_at']
|
||||
|
||||
def test_profile_list_defaults(self):
|
||||
arglist = []
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.profiles.assert_called_with(**self.defaults)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_profile_list_full_id(self):
|
||||
arglist = ['--full-id']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.profiles.assert_called_with(**self.defaults)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_profile_list_limit(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['limit'] = '3'
|
||||
arglist = ['--limit', '3']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.profiles.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_profile_list_sort(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'id:asc'
|
||||
arglist = ['--sort', 'id:asc']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.profiles.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_profile_list_sort_invalid_key(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'bad_key'
|
||||
arglist = ['--sort', 'bad_key']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.profiles.side_effect = sdk_exc.HttpException()
|
||||
self.assertRaises(sdk_exc.HttpException,
|
||||
self.cmd.take_action, parsed_args)
|
||||
|
||||
def test_profile_list_sort_invalid_direction(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'id:bad_direction'
|
||||
arglist = ['--sort', 'id:bad_direction']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.profiles.side_effect = sdk_exc.HttpException()
|
||||
self.assertRaises(sdk_exc.HttpException,
|
||||
self.cmd.take_action, parsed_args)
|
||||
|
||||
def test_profile_list_filter(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['name'] = 'my_profile'
|
||||
arglist = ['--filter', 'name=my_profile']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.profiles.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
|
||||
class TestProfileDelete(TestProfile):
|
||||
|
||||
def setUp(self):
|
||||
super(TestProfileDelete, self).setUp()
|
||||
self.cmd = osc_profile.DeleteProfile(self.app, None)
|
||||
self.mock_client.delete_profile = mock.Mock()
|
||||
|
||||
def test_profile_delete(self):
|
||||
arglist = ['profile1', 'profile2', 'profile3']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.delete_profile.assert_has_calls(
|
||||
[mock.call('profile1', False), mock.call('profile2', False),
|
||||
mock.call('profile3', False)]
|
||||
)
|
||||
|
||||
def test_profile_delete_force(self):
|
||||
arglist = ['profile1', 'profile2', 'profile3', '--force']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.delete_profile.assert_has_calls(
|
||||
[mock.call('profile1', False), mock.call('profile2', False),
|
||||
mock.call('profile3', False)]
|
||||
)
|
||||
|
||||
def test_profile_delete_not_found(self):
|
||||
arglist = ['my_profile']
|
||||
self.mock_client.delete_profile.side_effect = sdk_exc.ResourceNotFound
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
error = self.assertRaises(exc.CommandError, self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertIn('Failed to delete 1 of the 1 specified profile(s).',
|
||||
str(error))
|
||||
|
||||
def test_profile_delete_one_found_one_not_found(self):
|
||||
arglist = ['profile1', 'profile2']
|
||||
self.mock_client.delete_profile.side_effect = (
|
||||
[None, sdk_exc.ResourceNotFound]
|
||||
)
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
error = self.assertRaises(exc.CommandError,
|
||||
self.cmd.take_action, parsed_args)
|
||||
self.mock_client.delete_profile.assert_has_calls(
|
||||
[mock.call('profile1', False), mock.call('profile2', False)]
|
||||
)
|
||||
self.assertEqual('Failed to delete 1 of the 2 specified profile(s).',
|
||||
str(error))
|
||||
|
||||
@mock.patch('sys.stdin', spec=six.StringIO)
|
||||
def test_profile_delete_prompt_yes(self, mock_stdin):
|
||||
arglist = ['my_profile']
|
||||
mock_stdin.isatty.return_value = True
|
||||
mock_stdin.readline.return_value = 'y'
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
mock_stdin.readline.assert_called_with()
|
||||
self.mock_client.delete_profile.assert_called_with('my_profile',
|
||||
False)
|
||||
|
||||
@mock.patch('sys.stdin', spec=six.StringIO)
|
||||
def test_profile_delete_prompt_no(self, mock_stdin):
|
||||
arglist = ['my_profile']
|
||||
mock_stdin.isatty.return_value = True
|
||||
mock_stdin.readline.return_value = 'n'
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
mock_stdin.readline.assert_called_with()
|
||||
self.mock_client.delete_profile.assert_not_called()
|
||||
|
||||
|
||||
class TestProfileCreate(TestProfile):
|
||||
|
||||
spec_path = 'senlinclient/tests/test_specs/nova_server.yaml'
|
||||
|
||||
def setUp(self):
|
||||
super(TestProfileCreate, self).setUp()
|
||||
self.cmd = osc_profile.CreateProfile(self.app, None)
|
||||
fake_profile = mock.Mock(
|
||||
created_at="2015-03-01T14:28:25",
|
||||
domain_id=None,
|
||||
id="7fa885cd-fa39-4531-a42d-780af95c84a4",
|
||||
metadata={},
|
||||
project_id="42d9e9663331431f97b75e25136307ff",
|
||||
spec={"foo": 'bar'},
|
||||
type="os.heat.stack-1.0",
|
||||
updated_at=None,
|
||||
user_id="5e5bf8027826429c96af157f68dc9072"
|
||||
)
|
||||
fake_profile.name = "test_profile"
|
||||
fake_profile.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.create_profile = mock.Mock(return_value=fake_profile)
|
||||
self.mock_client.get_profile = mock.Mock(return_value=fake_profile)
|
||||
utils.get_dict_properties = mock.Mock(return_value='')
|
||||
self.defaults = {
|
||||
"spec": {
|
||||
"version": 1.0,
|
||||
"type": "os.nova.server",
|
||||
"properties": {
|
||||
"flavor": 1,
|
||||
"name": "cirros_server",
|
||||
"image": "cirros-0.3.4-x86_64-uec"
|
||||
},
|
||||
},
|
||||
"name": "my_profile",
|
||||
"metadata": {}
|
||||
}
|
||||
|
||||
def test_profile_create_defaults(self):
|
||||
arglist = ['my_profile', '--spec-file', self.spec_path]
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.mock_client.create_profile.assert_called_with(**self.defaults)
|
||||
|
||||
def test_profile_create_metadata(self):
|
||||
arglist = ['my_profile', '--spec-file', self.spec_path,
|
||||
'--metadata', 'key1=value1']
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['metadata'] = {'key1': 'value1'}
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.create_profile.assert_called_with(**kwargs)
|
||||
|
||||
|
||||
class TestProfileUpdate(TestProfile):
|
||||
|
||||
def setUp(self):
|
||||
super(TestProfileUpdate, self).setUp()
|
||||
self.cmd = osc_profile.UpdateProfile(self.app, None)
|
||||
fake_profile = mock.Mock(
|
||||
created_at="2015-03-01T14:28:25",
|
||||
domain_id=None,
|
||||
id="7fa885cd-fa39-4531-a42d-780af95c84a4",
|
||||
metadata={},
|
||||
project_id="42d9e9663331431f97b75e25136307ff",
|
||||
spec={"foo": 'bar'},
|
||||
type="os.heat.stack-1.0",
|
||||
updated_at=None,
|
||||
user_id="5e5bf8027826429c96af157f68dc9072"
|
||||
)
|
||||
fake_profile.name = "test_profile"
|
||||
fake_profile.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.update_profile = mock.Mock(return_value=fake_profile)
|
||||
self.mock_client.get_profile = mock.Mock(return_value=fake_profile)
|
||||
self.mock_client.find_profile = mock.Mock(return_value=fake_profile)
|
||||
utils.get_dict_properties = mock.Mock(return_value='')
|
||||
|
||||
def test_profile_update_defaults(self):
|
||||
arglist = ['--name', 'new_profile', '--metadata', 'nk1=nv1;nk2=nv2',
|
||||
'e3057c77']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
defaults = {
|
||||
"name": "new_profile",
|
||||
"metadata": {
|
||||
"nk1": "nv1",
|
||||
"nk2": "nv2",
|
||||
}
|
||||
}
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.mock_client.update_profile.assert_called_with(
|
||||
"7fa885cd-fa39-4531-a42d-780af95c84a4", **defaults)
|
||||
|
||||
def test_profile_update_not_found(self):
|
||||
arglist = ['--name', 'new_profile', '--metadata', 'nk1=nv1;nk2=nv2',
|
||||
'c6b8b252']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.find_profile.return_value = None
|
||||
error = self.assertRaises(
|
||||
exc.CommandError,
|
||||
self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertIn('Profile not found: c6b8b252', str(error))
|
||||
|
||||
|
||||
class TestProfileValidate(TestProfile):
|
||||
|
||||
spec_path = 'senlinclient/tests/test_specs/nova_server.yaml'
|
||||
defaults = {
|
||||
"spec": {
|
||||
"version": 1.0,
|
||||
"type": "os.nova.server",
|
||||
"properties": {
|
||||
"flavor": 1,
|
||||
"name": "cirros_server",
|
||||
"image": "cirros-0.3.4-x86_64-uec"
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestProfileValidate, self).setUp()
|
||||
self.cmd = osc_profile.ValidateProfile(self.app, None)
|
||||
fake_profile = mock.Mock(
|
||||
created_at=None,
|
||||
domain_id=None,
|
||||
id=None,
|
||||
metadata={},
|
||||
project_id="42d9e9663331431f97b75e25136307ff",
|
||||
spec={"foo": 'bar'},
|
||||
type="os.heat.stack-1.0",
|
||||
updated_at=None,
|
||||
user_id="5e5bf8027826429c96af157f68dc9072"
|
||||
)
|
||||
fake_profile.name = "test_profile"
|
||||
fake_profile.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.validate_profile = mock.Mock(
|
||||
return_value=fake_profile)
|
||||
utils.get_dict_properties = mock.Mock(return_value='')
|
||||
|
||||
def test_profile_validate(self):
|
||||
arglist = ['--spec-file', self.spec_path]
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.validate_profile.assert_called_with(**self.defaults)
|
||||
|
||||
profile = self.mock_client.validate_profile(**self.defaults)
|
||||
|
||||
self.assertEqual("42d9e9663331431f97b75e25136307ff",
|
||||
profile.project_id)
|
||||
self.assertEqual("5e5bf8027826429c96af157f68dc9072", profile.user_id)
|
||||
self.assertIsNone(profile.id)
|
||||
self.assertEqual({}, profile.metadata)
|
||||
self.assertEqual("test_profile", profile.name)
|
||||
self.assertEqual("os.heat.stack-1.0", profile.type)
|
@ -1,95 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
from openstack import exceptions as sdk_exc
|
||||
from osc_lib import exceptions as exc
|
||||
|
||||
from senlinclient.tests.unit.v1 import fakes
|
||||
from senlinclient.v1 import profile_type as osc_profile_type
|
||||
|
||||
|
||||
class TestProfileType(fakes.TestClusteringv1):
|
||||
def setUp(self):
|
||||
super(TestProfileType, self).setUp()
|
||||
self.mock_client = self.app.client_manager.clustering
|
||||
|
||||
|
||||
class TestProfileTypeList(TestProfileType):
|
||||
|
||||
def setUp(self):
|
||||
super(TestProfileTypeList, self).setUp()
|
||||
self.cmd = osc_profile_type.ProfileTypeList(self.app, None)
|
||||
pt1 = mock.Mock(
|
||||
schema={'foo': 'bar'},
|
||||
support_status={
|
||||
"1.0": [{"status": "SUPPORTED", "since": "2016.10"}]
|
||||
}
|
||||
)
|
||||
pt1.name = "BBB"
|
||||
pt2 = mock.Mock(
|
||||
schema={'foo': 'bar'},
|
||||
support_status={
|
||||
"1.0": [{"status": "DEPRECATED", "since": "2016.01"}]
|
||||
}
|
||||
)
|
||||
pt2.name = "AAA"
|
||||
self.mock_client.profile_types = mock.Mock(return_value=[pt1, pt2])
|
||||
|
||||
def test_profile_type_list(self):
|
||||
arglist = []
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
expected_rows = [
|
||||
('AAA', '1.0', 'DEPRECATED since 2016.01'),
|
||||
('BBB', '1.0', 'SUPPORTED since 2016.10')
|
||||
]
|
||||
expected_columns = ['name', 'version', 'support_status']
|
||||
|
||||
columns, rows = self.cmd.take_action(parsed_args)
|
||||
|
||||
self.mock_client.profile_types.assert_called_with()
|
||||
self.assertEqual(expected_columns, columns)
|
||||
self.assertEqual(expected_rows, rows)
|
||||
|
||||
|
||||
class TestProfileTypeShow(TestProfileType):
|
||||
|
||||
def setUp(self):
|
||||
super(TestProfileTypeShow, self).setUp()
|
||||
self.cmd = osc_profile_type.ProfileTypeShow(self.app, None)
|
||||
fake_profile_type = mock.Mock(
|
||||
schema={'foo': 'bar'},
|
||||
support_status={
|
||||
"1.0": [{"status": "DEPRECATED", "since": "2016.01"}]
|
||||
}
|
||||
)
|
||||
fake_profile_type.name = "os.heat.stack-1.0"
|
||||
fake_profile_type.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.get_profile_type = mock.Mock(
|
||||
return_value=fake_profile_type)
|
||||
|
||||
def test_profile_type_show(self):
|
||||
arglist = ['os.heat.stack-1.0']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.get_profile_type.assert_called_once_with(
|
||||
'os.heat.stack-1.0')
|
||||
|
||||
def test_profile_type_show_not_found(self):
|
||||
arglist = ['os.heat.stack-1.1']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.get_profile_type.side_effect = (
|
||||
sdk_exc.ResourceNotFound())
|
||||
error = self.assertRaises(exc.CommandError, self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertEqual('Profile Type not found: os.heat.stack-1.1',
|
||||
str(error))
|
@ -1,373 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
|
||||
import mock
|
||||
from openstack import exceptions as sdk_exc
|
||||
from osc_lib import exceptions as exc
|
||||
from osc_lib import utils
|
||||
import six
|
||||
|
||||
from senlinclient.common.i18n import _
|
||||
from senlinclient.tests.unit.v1 import fakes
|
||||
from senlinclient.v1 import receiver as osc_receiver
|
||||
|
||||
|
||||
class TestReceiver(fakes.TestClusteringv1):
|
||||
def setUp(self):
|
||||
super(TestReceiver, self).setUp()
|
||||
self.mock_client = self.app.client_manager.clustering
|
||||
|
||||
|
||||
class TestReceiverList(TestReceiver):
|
||||
columns = ['id', 'name', 'type', 'cluster_id', 'action', 'created_at']
|
||||
defaults = {
|
||||
'global_project': False,
|
||||
'marker': None,
|
||||
'limit': None,
|
||||
'sort': None,
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestReceiverList, self).setUp()
|
||||
self.cmd = osc_receiver.ListReceiver(self.app, None)
|
||||
fake_receiver = mock.Mock(
|
||||
action="CLUSTER_SCALE_OUT",
|
||||
actor={},
|
||||
channel={
|
||||
"alarm_url": "http://node1:8778/v1/webhooks/e03dd2e5-8f2e-4ec1"
|
||||
"-8c6a-74ba891e5422/trigger?V=1&count=1"
|
||||
},
|
||||
cluster_id="ae63a10b-4a90-452c-aef1-113a0b255ee3",
|
||||
created_at="2015-06-27T05:09:43",
|
||||
domain_id="Default",
|
||||
id="573aa1ba-bf45-49fd-907d-6b5d6e6adfd3",
|
||||
name="cluster_inflate",
|
||||
params={"count": "1"},
|
||||
project_id="6e18cc2bdbeb48a5b3cad2dc499f6804",
|
||||
type="webhook",
|
||||
updated_at=None,
|
||||
user_id="b4ad2d6e18cc2b9c48049f6dbe8a5b3c"
|
||||
)
|
||||
fake_receiver.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.receivers = mock.Mock(return_value=[fake_receiver])
|
||||
|
||||
def test_receiver_list_defaults(self):
|
||||
arglist = []
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.receivers.assert_called_with(**self.defaults)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_receiver_list_full_id(self):
|
||||
arglist = ['--full-id']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.receivers.assert_called_with(**self.defaults)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_receiver_list_limit(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['limit'] = '3'
|
||||
arglist = ['--limit', '3']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.receivers.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_receiver_list_sort(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'name:asc'
|
||||
arglist = ['--sort', 'name:asc']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.receivers.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_receiver_list_sort_invalid_key(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'bad_key'
|
||||
arglist = ['--sort', 'bad_key']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.receivers.side_effect = sdk_exc.HttpException()
|
||||
self.assertRaises(sdk_exc.HttpException,
|
||||
self.cmd.take_action, parsed_args)
|
||||
|
||||
def test_receiver_list_sort_invalid_direction(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['sort'] = 'name:bad_direction'
|
||||
arglist = ['--sort', 'name:bad_direction']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.receivers.side_effect = sdk_exc.HttpException()
|
||||
self.assertRaises(sdk_exc.HttpException,
|
||||
self.cmd.take_action, parsed_args)
|
||||
|
||||
def test_receiver_list_filter(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['name'] = 'my_receiver'
|
||||
arglist = ['--filter', 'name=my_receiver']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.receivers.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
def test_receiver_list_marker(self):
|
||||
kwargs = copy.deepcopy(self.defaults)
|
||||
kwargs['marker'] = 'a9448bf6'
|
||||
arglist = ['--marker', 'a9448bf6']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.receivers.assert_called_with(**kwargs)
|
||||
self.assertEqual(self.columns, columns)
|
||||
|
||||
|
||||
class TestReceiverShow(TestReceiver):
|
||||
|
||||
def setUp(self):
|
||||
super(TestReceiverShow, self).setUp()
|
||||
self.cmd = osc_receiver.ShowReceiver(self.app, None)
|
||||
fake_receiver = mock.Mock(
|
||||
action="CLUSTER_SCALE_OUT",
|
||||
actor={},
|
||||
channel={
|
||||
"alarm_url": "http://node1:8778/v1/webhooks/e03dd2e5-8f2e-4ec1"
|
||||
"-8c6a-74ba891e5422/trigger?V=1&count=1"
|
||||
},
|
||||
cluster_id="ae63a10b-4a90-452c-aef1-113a0b255ee3",
|
||||
created_at="2015-06-27T05:09:43",
|
||||
domain_id="Default",
|
||||
id="573aa1ba-bf45-49fd-907d-6b5d6e6adfd3",
|
||||
name="cluster_inflate",
|
||||
params={"count": "1"},
|
||||
project_id="6e18cc2bdbeb48a5b3cad2dc499f6804",
|
||||
type="webhook",
|
||||
updated_at=None,
|
||||
user_id="b4ad2d6e18cc2b9c48049f6dbe8a5b3c"
|
||||
)
|
||||
fake_receiver.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.get_receiver = mock.Mock(return_value=fake_receiver)
|
||||
|
||||
def test_receiver_show(self):
|
||||
arglist = ['my_receiver']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.get_receiver.assert_called_with('my_receiver')
|
||||
|
||||
def test_receiver_show_not_found(self):
|
||||
arglist = ['my_receiver']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.get_receiver.side_effect = sdk_exc.ResourceNotFound()
|
||||
error = self.assertRaises(exc.CommandError, self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertEqual('Receiver not found: my_receiver', str(error))
|
||||
|
||||
|
||||
class TestReceiverCreate(TestReceiver):
|
||||
|
||||
args = {
|
||||
"action": "CLUSTER_SCALE_OUT",
|
||||
"cluster_id": "my_cluster",
|
||||
"name": "my_receiver",
|
||||
"params": {
|
||||
"count": "1"
|
||||
},
|
||||
"type": "webhook"
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestReceiverCreate, self).setUp()
|
||||
self.cmd = osc_receiver.CreateReceiver(self.app, None)
|
||||
fake_receiver = mock.Mock(
|
||||
action="CLUSTER_SCALE_OUT",
|
||||
actor={},
|
||||
channel={
|
||||
"alarm_url": "http://node1:8778/v1/webhooks/e03dd2e5-8f2e-4ec1"
|
||||
"-8c6a-74ba891e5422/trigger?V=1&count=1"
|
||||
},
|
||||
cluster_id="ae63a10b-4a90-452c-aef1-113a0b255ee3",
|
||||
created_at="2015-06-27T05:09:43",
|
||||
domain_id="Default",
|
||||
id="573aa1ba-bf45-49fd-907d-6b5d6e6adfd3",
|
||||
name="cluster_inflate",
|
||||
params={"count": "1"},
|
||||
project_id="6e18cc2bdbeb48a5b3cad2dc499f6804",
|
||||
type="webhook",
|
||||
updated_at=None,
|
||||
user_id="b4ad2d6e18cc2b9c48049f6dbe8a5b3c"
|
||||
)
|
||||
fake_receiver.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.create_receiver = mock.Mock(
|
||||
return_value=fake_receiver)
|
||||
self.mock_client.get_receiver = mock.Mock(
|
||||
return_value=fake_receiver)
|
||||
|
||||
def test_receiver_create_webhook(self):
|
||||
arglist = ['my_receiver', '--action', 'CLUSTER_SCALE_OUT',
|
||||
'--cluster', 'my_cluster', '--params', 'count=1',
|
||||
'--type', 'webhook']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.create_receiver.assert_called_with(**self.args)
|
||||
|
||||
def test_receiver_create_webhook_failed(self):
|
||||
arglist = ['my_receiver', '--action', 'CLUSTER_SCALE_OUT',
|
||||
'--params', 'count=1', '--type', 'webhook']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
error = self.assertRaises(exc.CommandError, self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertIn(_('cluster and action parameters are required to create '
|
||||
'webhook type of receiver'), str(error))
|
||||
|
||||
def test_receiver_create_non_webhook(self):
|
||||
arglist = ['my_receiver', '--params', 'count=1',
|
||||
'--type', 'foo']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
args = copy.deepcopy(self.args)
|
||||
args['type'] = 'foo'
|
||||
args['cluster_id'] = None
|
||||
args['action'] = None
|
||||
self.mock_client.create_receiver.assert_called_with(**args)
|
||||
|
||||
|
||||
class TestReceiverUpdate(TestReceiver):
|
||||
args = {
|
||||
"action": "CLUSTER_SCALE_OUT",
|
||||
"name": "test_receiver",
|
||||
"params": {
|
||||
"count": "2"
|
||||
},
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestReceiverUpdate, self).setUp()
|
||||
self.cmd = osc_receiver.UpdateReceiver(self.app, None)
|
||||
fake_receiver = mock.Mock(
|
||||
action="CLUSTER_SCALE_IN",
|
||||
actor={},
|
||||
channel={
|
||||
"alarm_url": "http://node1:8778/v1/webhooks/e03dd2e5-8f2e-4ec1"
|
||||
"-8c6a-74ba891e5422/trigger?V=1&count=1"
|
||||
},
|
||||
created_at="2015-06-27T05:09:43",
|
||||
domain_id="Default",
|
||||
id="573aa1ba-bf45-49fd-907d-6b5d6e6adfd3",
|
||||
params={"count": "1"},
|
||||
project_id="6e18cc2bdbeb48a5b3cad2dc499f6804",
|
||||
updated_at=None,
|
||||
user_id="b4ad2d6e18cc2b9c48049f6dbe8a5b3c"
|
||||
)
|
||||
fake_receiver.name = "cluster_inflate"
|
||||
fake_receiver.action = "CLUSTER_SCALE_IN"
|
||||
fake_receiver.params = {"count": "1"}
|
||||
fake_receiver.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.update_receiver = mock.Mock(
|
||||
return_value=fake_receiver)
|
||||
self.mock_client.get_receiver = mock.Mock(return_value=fake_receiver)
|
||||
self.mock_client.find_receiver = mock.Mock(return_value=fake_receiver)
|
||||
utils.get_dict_properties = mock.Mock(return_value='')
|
||||
|
||||
def test_receiver_update_defaults(self):
|
||||
arglist = ['--name', 'test_receiver', '--action', 'CLUSTER_SCALE_OUT',
|
||||
'--params', 'count=2', '573aa1ba']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.mock_client.update_receiver.assert_called_with(
|
||||
"573aa1ba-bf45-49fd-907d-6b5d6e6adfd3", **self.args)
|
||||
|
||||
def test_receiver_update_not_found(self):
|
||||
arglist = ['--name', 'test_receiver', '--action', 'CLUSTER_SCALE_OUT',
|
||||
'--params', 'count=2', '573aa1b2']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.mock_client.find_receiver.return_value = None
|
||||
error = self.assertRaises(exc.CommandError,
|
||||
self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertIn('Receiver not found: 573aa1b2', str(error))
|
||||
|
||||
|
||||
class TestReceiverDelete(TestReceiver):
|
||||
|
||||
def setUp(self):
|
||||
super(TestReceiverDelete, self).setUp()
|
||||
self.cmd = osc_receiver.DeleteReceiver(self.app, None)
|
||||
self.mock_client.delete_receiver = mock.Mock()
|
||||
|
||||
def test_receiver_delete(self):
|
||||
arglist = ['receiver1', 'receiver2', 'receiver3']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.delete_receiver.assert_has_calls(
|
||||
[mock.call('receiver1', False), mock.call('receiver2', False),
|
||||
mock.call('receiver3', False)]
|
||||
)
|
||||
|
||||
def test_receiver_delete_force(self):
|
||||
arglist = ['receiver1', 'receiver2', 'receiver3', '--force']
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.mock_client.delete_receiver.assert_has_calls(
|
||||
[mock.call('receiver1', False), mock.call('receiver2', False),
|
||||
mock.call('receiver3', False)]
|
||||
)
|
||||
|
||||
def test_receiver_delete_not_found(self):
|
||||
arglist = ['my_receiver']
|
||||
self.mock_client.delete_receiver.side_effect = (
|
||||
sdk_exc.ResourceNotFound)
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
error = self.assertRaises(exc.CommandError, self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.assertIn('Failed to delete 1 of the 1 specified receiver(s).',
|
||||
str(error))
|
||||
|
||||
def test_receiver_delete_one_found_one_not_found(self):
|
||||
arglist = ['receiver1', 'receiver2']
|
||||
self.mock_client.delete_receiver.side_effect = (
|
||||
[None, sdk_exc.ResourceNotFound]
|
||||
)
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
error = self.assertRaises(exc.CommandError,
|
||||
self.cmd.take_action, parsed_args)
|
||||
self.mock_client.delete_receiver.assert_has_calls(
|
||||
[mock.call('receiver1', False), mock.call('receiver2', False)]
|
||||
)
|
||||
self.assertEqual('Failed to delete 1 of the 2 specified receiver(s).',
|
||||
str(error))
|
||||
|
||||
@mock.patch('sys.stdin', spec=six.StringIO)
|
||||
def test_receiver_delete_prompt_yes(self, mock_stdin):
|
||||
arglist = ['my_receiver']
|
||||
mock_stdin.isatty.return_value = True
|
||||
mock_stdin.readline.return_value = 'y'
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
mock_stdin.readline.assert_called_with()
|
||||
self.mock_client.delete_receiver.assert_called_with('my_receiver',
|
||||
False)
|
||||
|
||||
@mock.patch('sys.stdin', spec=six.StringIO)
|
||||
def test_receiver_delete_prompt_no(self, mock_stdin):
|
||||
arglist = ['my_receiver']
|
||||
mock_stdin.isatty.return_value = True
|
||||
mock_stdin.readline.return_value = 'n'
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
mock_stdin.readline.assert_called_with()
|
||||
self.mock_client.delete_receiver.assert_not_called()
|
@ -1,43 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import mock
|
||||
|
||||
from senlinclient.tests.unit.v1 import fakes
|
||||
from senlinclient.v1 import service as osc_service
|
||||
|
||||
|
||||
class TestServiceList(fakes.TestClusteringv1):
|
||||
columns = ['binary', 'host', 'status', 'state', 'updated_at',
|
||||
'disabled_reason']
|
||||
|
||||
def setUp(self):
|
||||
super(TestServiceList, self).setUp()
|
||||
self.mock_client = self.app.client_manager.clustering
|
||||
self.cmd = osc_service.ListService(self.app, None)
|
||||
fake_service = mock.Mock(
|
||||
Binary='senlin-engine',
|
||||
Host='Host1',
|
||||
Status='enabled',
|
||||
State='up',
|
||||
Updated_at=None,
|
||||
Disabled_Reason=None,
|
||||
)
|
||||
fake_service.name = 'test_service'
|
||||
fake_service.to_dict = mock.Mock(return_value={})
|
||||
self.mock_client.services = mock.Mock(return_value=[fake_service])
|
||||
|
||||
def test_service(self):
|
||||
arglist = []
|
||||
parsed_args = self.check_parser(self.cmd, arglist, [])
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
self.mock_client.services.assert_called_with()
|
||||
self.assertEqual(self.columns, columns)
|
File diff suppressed because it is too large
@ -1,152 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Clustering v1 action implementations"""
|
||||
|
||||
import logging
|
||||
|
||||
from openstack import exceptions as sdk_exc
|
||||
from osc_lib.command import command
|
||||
from osc_lib import exceptions as exc
|
||||
from osc_lib import utils
|
||||
|
||||
from senlinclient.common.i18n import _
|
||||
from senlinclient.common import utils as senlin_utils
|
||||
|
||||
|
||||
class ListAction(command.Lister):
|
||||
"""List actions."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".ListAction")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(ListAction, self).get_parser(prog_name)
|
||||
parser.add_argument(
|
||||
'--filters',
|
||||
metavar='<"key1=value1;key2=value2...">',
|
||||
help=_("Filter parameters to apply on returned actions. "
|
||||
"This can be specified multiple times, or once with "
|
||||
"parameters separated by a semicolon. The valid filter "
|
||||
"keys are: ['name', 'target', 'action', 'status']. "
|
||||
"NOTICE: The value of 'target', if provided, "
|
||||
"must be a full ID."),
|
||||
action='append'
|
||||
)
|
||||
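# Illustrative examples of the --filters syntax described above (the command
# name and filter values are assumptions, not taken from this module):
#   openstack cluster action list --filters "status=SUCCEEDED;action=NODE_CREATE"
#   openstack cluster action list --filters status=SUCCEEDED \
#       --filters action=NODE_CREATE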
parser.add_argument(
|
||||
'--sort',
|
||||
metavar='<key>[:<direction>]',
|
||||
help=_("Sorting option which is a string containing a list of "
|
||||
"keys separated by commas. Each key can be optionally "
|
||||
"appended by a sort direction (:asc or :desc). The valid "
|
||||
"sort keys are: ['name', 'target', 'action', 'created_at',"
|
||||
" 'status']")
|
||||
)
|
||||
parser.add_argument(
|
||||
'--limit',
|
||||
metavar='<limit>',
|
||||
help=_('Limit the number of actions returned')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--marker',
|
||||
metavar='<id>',
|
||||
help=_('Only return actions that appear after the given action ID')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--global-project',
|
||||
default=False,
|
||||
action="store_true",
|
||||
help=_('Whether actions from all projects should be listed. '
'Default is False. Setting this to True may require '
'admin privileges.')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--full-id',
|
||||
default=False,
|
||||
action="store_true",
|
||||
help=_('Print full IDs in list')
|
||||
)
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
senlin_client = self.app.client_manager.clustering
|
||||
|
||||
columns = ['id', 'name', 'action', 'status', 'target_id', 'depends_on',
|
||||
'depended_by', 'created_at']
|
||||
|
||||
queries = {
|
||||
'sort': parsed_args.sort,
|
||||
'limit': parsed_args.limit,
|
||||
'marker': parsed_args.marker,
|
||||
'global_project': parsed_args.global_project,
|
||||
}
|
||||
|
||||
if parsed_args.filters:
|
||||
queries.update(senlin_utils.format_parameters(parsed_args.filters))
|
||||
|
||||
actions = senlin_client.actions(**queries)
|
||||
|
||||
formatters = {}
|
||||
s = None
|
||||
if not parsed_args.full_id:
|
||||
s = 8
|
||||
formatters['id'] = lambda x: x[:s]
|
||||
formatters['target_id'] = lambda x: x[:s]
|
||||
|
||||
formatters['depends_on'] = lambda x: '\n'.join(a[:s] for a in x)
|
||||
formatters['depended_by'] = lambda x: '\n'.join(a[:s] for a in x)
|
||||
|
||||
return (
|
||||
columns,
|
||||
(utils.get_item_properties(a, columns,
|
||||
formatters=formatters)
|
||||
for a in actions)
|
||||
)
|
||||
|
||||
|
||||
class ShowAction(command.ShowOne):
|
||||
"""Show detailed info about the specified action."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".ShowAction")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(ShowAction, self).get_parser(prog_name)
|
||||
parser.add_argument(
|
||||
'action',
|
||||
metavar='<action>',
|
||||
help=_('Name or ID of the action to show the details for')
|
||||
)
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
senlin_client = self.app.client_manager.clustering
|
||||
try:
|
||||
action = senlin_client.get_action(parsed_args.action)
|
||||
except sdk_exc.ResourceNotFound:
|
||||
raise exc.CommandError(_('Action not found: %s')
|
||||
% parsed_args.action)
|
||||
|
||||
formatters = {
|
||||
'inputs': senlin_utils.json_formatter,
|
||||
'outputs': senlin_utils.json_formatter,
|
||||
'metadata': senlin_utils.json_formatter,
|
||||
'data': senlin_utils.json_formatter,
|
||||
'depends_on': senlin_utils.list_formatter,
|
||||
'depended_by': senlin_utils.list_formatter,
|
||||
}
|
||||
data = action.to_dict()
|
||||
columns = sorted(data.keys())
|
||||
return columns, utils.get_dict_properties(data, columns,
|
||||
formatters=formatters)
|
@ -1,48 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Clustering v1 build_info action implementations"""
|
||||
|
||||
import logging
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils
|
||||
|
||||
from senlinclient.common import utils as senlin_utils
|
||||
|
||||
|
||||
class BuildInfo(command.ShowOne):
|
||||
"""Retrieve build information."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".BuildInfo")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(BuildInfo, self).get_parser(prog_name)
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
senlin_client = self.app.client_manager.clustering
|
||||
result = senlin_client.get_build_info()
|
||||
|
||||
formatters = {
|
||||
'api': senlin_utils.json_formatter,
|
||||
'engine': senlin_utils.json_formatter,
|
||||
}
|
||||
data = {
|
||||
'api': result.api,
|
||||
'engine': result.engine,
|
||||
}
|
||||
columns = ['api', 'engine']
|
||||
return columns, utils.get_dict_properties(data, columns,
|
||||
formatters=formatters)
|
@ -1,478 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from senlinclient.common import sdk
|
||||
|
||||
|
||||
class Client(object):
|
||||
|
||||
def __init__(self, prof=None, user_agent=None, **kwargs):
|
||||
self.conn = sdk.create_connection(prof=prof, user_agent=user_agent,
|
||||
**kwargs)
|
||||
self.service = self.conn.cluster
|
||||
|
||||
######################################################################
# The following operations are interfaces exposed to other software
# that invokes senlinclient today.
# These methods form a temporary translation layer. This layer will
# become unnecessary once the OpenStack SDK has been adopted for all
# senlin resources.
######################################################################

def profile_types(self, **query):
|
||||
"""List profile types
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#list-profile-types
|
||||
"""
|
||||
return self.service.profile_types(**query)
|
||||
|
||||
def get_profile_type(self, profile_type):
|
||||
"""Show profile type details
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#show-profile-type-details
|
||||
"""
|
||||
return self.service.get_profile_type(profile_type)
|
||||
|
||||
def profiles(self, **query):
|
||||
"""List profiles
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#list-profiles
|
||||
"""
|
||||
return self.service.profiles(**query)
|
||||
|
||||
def create_profile(self, **attrs):
|
||||
"""Create a profile
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#create-profile
|
||||
"""
|
||||
return self.service.create_profile(**attrs)
|
||||
|
||||
def get_profile(self, profile):
|
||||
"""Show profile details
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#show-profile-details
|
||||
"""
|
||||
return self.service.get_profile(profile)
|
||||
|
||||
def update_profile(self, profile, **attrs):
|
||||
"""Update a profile
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#update-profile
|
||||
"""
|
||||
return self.service.update_profile(profile, **attrs)
|
||||
|
||||
def delete_profile(self, profile, ignore_missing=True):
|
||||
"""Delete a profile
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#delete-profile
|
||||
"""
|
||||
return self.service.delete_profile(profile, ignore_missing)
|
||||
|
||||
def validate_profile(self, **attrs):
|
||||
"""Validate a profile spec
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#validate-profile
|
||||
"""
|
||||
return self.service.validate_profile(**attrs)
|
||||
|
||||
def policy_types(self, **query):
|
||||
"""List policy types
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#list-policy-types
|
||||
"""
|
||||
return self.service.policy_types(**query)
|
||||
|
||||
def get_policy_type(self, policy_type):
|
||||
"""Show policy type details
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#show-policy-type-details
|
||||
"""
|
||||
return self.service.get_policy_type(policy_type)
|
||||
|
||||
def policies(self, **query):
|
||||
"""List policies
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#list-policies
|
||||
"""
|
||||
return self.service.policies(**query)
|
||||
|
||||
def create_policy(self, **attrs):
|
||||
"""Create a policy
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#create-policy
|
||||
"""
|
||||
return self.service.create_policy(**attrs)
|
||||
|
||||
def get_policy(self, policy):
|
||||
"""Show policy details
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#show-policy-details
|
||||
"""
|
||||
return self.service.get_policy(policy)
|
||||
|
||||
def update_policy(self, policy, **attrs):
|
||||
"""Update policy
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#update-policy
|
||||
"""
|
||||
return self.service.update_policy(policy, **attrs)
|
||||
|
||||
def delete_policy(self, policy, ignore_missing=True):
|
||||
"""Delete policy
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#delete-policy
|
||||
"""
|
||||
return self.service.delete_policy(policy, ignore_missing)
|
||||
|
||||
def validate_policy(self, **attrs):
|
||||
"""validate a policy spec
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#validate-policy
|
||||
"""
|
||||
return self.service.validate_policy(**attrs)
|
||||
|
||||
def clusters(self, **queries):
|
||||
"""List clusters
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#list-clusters
|
||||
"""
|
||||
return self.service.clusters(**queries)
|
||||
|
||||
def create_cluster(self, **attrs):
|
||||
"""Create a cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#create-cluster
|
||||
"""
|
||||
return self.service.create_cluster(**attrs)
|
||||
|
||||
def get_cluster(self, cluster):
|
||||
"""Show cluster details
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#show-cluster-details
|
||||
"""
|
||||
return self.service.get_cluster(cluster)
|
||||
|
||||
def update_cluster(self, cluster, **attrs):
|
||||
"""Update cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#update-cluster
|
||||
"""
|
||||
return self.service.update_cluster(cluster, **attrs)
|
||||
|
||||
def delete_cluster(self, cluster, ignore_missing=True):
|
||||
"""Delete cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#delete-cluster
|
||||
"""
|
||||
return self.service.delete_cluster(cluster, ignore_missing)
|
||||
|
||||
def cluster_add_nodes(self, cluster, nodes):
|
||||
"""Add a node to cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#add-nodes-to-a-cluster
|
||||
"""
|
||||
return self.service.cluster_add_nodes(cluster, nodes)
|
||||
|
||||
def cluster_del_nodes(self, cluster, nodes):
|
||||
"""Delete a node belongs to cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#remove-nodes-from-a-cluster
|
||||
"""
|
||||
return self.service.cluster_del_nodes(cluster, nodes)
|
||||
|
||||
def cluster_replace_nodes(self, cluster, nodes):
|
||||
"""Replace the nodes in a cluster with specified nodes
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#replace-nodes-in-a-cluster
|
||||
"""
|
||||
return self.service.cluster_replace_nodes(cluster, nodes)
|
||||
|
||||
def cluster_resize(self, cluster, **params):
|
||||
"""Resize cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#resize-a-cluster
|
||||
"""
|
||||
return self.service.cluster_resize(cluster, **params)
|
||||
|
||||
def cluster_scale_out(self, cluster, count):
|
||||
"""Scale out cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#scale-out-a-cluster
|
||||
"""
|
||||
return self.service.cluster_scale_out(cluster, count)
|
||||
|
||||
def cluster_scale_in(self, cluster, count):
|
||||
"""Scale in cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#scale-in-a-cluster
|
||||
"""
|
||||
return self.service.cluster_scale_in(cluster, count)
|
||||
|
||||
def cluster_policies(self, cluster, **queries):
|
||||
"""List all policies attached to cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#list-all-cluster-policies
|
||||
"""
|
||||
return self.service.cluster_policies(cluster, **queries)
|
||||
|
||||
def get_cluster_policy(self, policy, cluster):
|
||||
"""Show details of a policy attached to cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#show-cluster-policy-details
|
||||
"""
|
||||
return self.service.get_cluster_policy(policy, cluster)
|
||||
|
||||
def cluster_attach_policy(self, cluster, policy, **attrs):
|
||||
"""Attach a policy to cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#attach-a-policy-to-a-cluster
|
||||
"""
|
||||
return self.service.cluster_attach_policy(cluster, policy, **attrs)
|
||||
|
||||
def cluster_detach_policy(self, cluster, policy):
|
||||
"""Detach a policy from cluster
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#detach-a-policy-from-a-cluster
|
||||
"""
|
||||
return self.service.cluster_detach_policy(cluster, policy)
|
||||
|
||||
def cluster_update_policy(self, cluster, policy, **attrs):
|
||||
"""Update the policy attachment
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#update-a-policy-on-a-cluster
|
||||
"""
|
||||
return self.service.cluster_update_policy(cluster, policy, **attrs)
|
||||
|
||||
def collect_cluster_attrs(self, cluster, path):
|
||||
"""Collect cluster attributes
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#collect-attributes-across-a-cluster
|
||||
"""
|
||||
return self.service.collect_cluster_attrs(cluster, path)
|
||||
|
||||
def check_cluster(self, cluster, **params):
|
||||
"""Check cluster's health status
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#check-a-cluster-s-health-status
|
||||
"""
|
||||
return self.service.check_cluster(cluster, **params)
|
||||
|
||||
def recover_cluster(self, cluster, **params):
|
||||
"""Recover cluster from failure state
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#recover-a-cluster-to-a-healthy-status
|
||||
"""
|
||||
return self.service.recover_cluster(cluster, **params)
|
||||
|
||||
def perform_operation_on_cluster(self, cluster, operation, **params):
|
||||
"""Perform an operation on a cluster.
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#perform-an-operation-on-a-cluster
|
||||
"""
|
||||
return self.service.perform_operation_on_cluster(cluster, operation,
|
||||
**params)
|
||||
|
||||
def nodes(self, **queries):
|
||||
"""List nodes
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#list-nodes
|
||||
"""
|
||||
return self.service.nodes(**queries)
|
||||
|
||||
def create_node(self, **attrs):
|
||||
"""Create a node
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#create-node
|
||||
"""
|
||||
return self.service.create_node(**attrs)
|
||||
|
||||
def get_node(self, node, details=False):
|
||||
"""Show node details
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#show-node-details
|
||||
"""
|
||||
return self.service.get_node(node, details=details)
|
||||
|
||||
def update_node(self, node, **attrs):
|
||||
"""Update node
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#update-node
|
||||
"""
|
||||
return self.service.update_node(node, **attrs)
|
||||
|
||||
def delete_node(self, node, ignore_missing=True):
|
||||
"""Delete node
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#delete-node
|
||||
"""
|
||||
return self.service.delete_node(node, ignore_missing)
|
||||
|
||||
def check_node(self, node, **params):
|
||||
"""Check node's health status
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#check-a-node-s-health
|
||||
"""
|
||||
return self.service.check_node(node, **params)
|
||||
|
||||
def recover_node(self, node, **params):
|
||||
"""Recover node from failure state
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#recover-a-node-to-healthy-status
|
||||
"""
|
||||
return self.service.recover_node(node, **params)
|
||||
|
||||
def perform_operation_on_node(self, node, operation, **params):
|
||||
"""Perform an operation on a node.
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/
|
||||
#perform-an-operation-on-a-node
|
||||
"""
|
||||
return self.service.perform_operation_on_node(node, operation,
|
||||
**params)
|
||||
|
||||
def receivers(self, **queries):
|
||||
"""List receivers
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#list-receivers
|
||||
"""
|
||||
return self.service.receivers(**queries)
|
||||
|
||||
def create_receiver(self, **attrs):
|
||||
"""Creare a receiver
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#create-receiver
|
||||
"""
|
||||
return self.service.create_receiver(**attrs)
|
||||
|
||||
def get_receiver(self, receiver):
|
||||
"""Show receiver details
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#show-receiver-details
|
||||
"""
|
||||
return self.service.get_receiver(receiver)
|
||||
|
||||
def update_receiver(self, receiver, **attrs):
|
||||
"""Update receiver
|
||||
|
||||
Doc link:
|
||||
http://developer.openstack.org/api-ref-clustering-v1.html#updateReceiver
|
||||
"""
|
||||
return self.service.update_receiver(receiver, **attrs)
|
||||
|
||||
def delete_receiver(self, receiver, ignore_missing=True):
|
||||
"""Delete receiver
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#delete-receiver
|
||||
"""
|
||||
return self.service.delete_receiver(receiver, ignore_missing)
|
||||
|
||||
def events(self, **queries):
|
||||
"""List events
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#list-events
|
||||
"""
|
||||
return self.service.events(**queries)
|
||||
|
||||
def get_event(self, event):
|
||||
"""Show event details
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#shows-event-details
|
||||
"""
|
||||
return self.service.get_event(event)
|
||||
|
||||
def actions(self, **queries):
|
||||
"""List actions
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#list-actions
|
||||
"""
|
||||
return self.service.actions(**queries)
|
||||
|
||||
def get_action(self, action):
|
||||
"""Show action details
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#show-action-details
|
||||
"""
|
||||
return self.service.get_action(action)
|
||||
|
||||
def services(self, **queries):
|
||||
"""List services
|
||||
|
||||
Doc link:
|
||||
https://developer.openstack.org/api-ref/clustering/#list-services
|
||||
"""
|
||||
return self.service.services(**queries)
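The class above is a thin pass-through: every helper simply delegates to the SDK's clustering service proxy held in self.service. The sketch below shows how such a client might be used; the module path senlinclient.v1.client and the connection keyword arguments are assumptions for illustration only, since the exact parameters accepted by sdk.create_connection() depend on the deployment.

# Minimal usage sketch (assumptions: module path and auth parameters shown
# here are illustrative, not confirmed by the removed code itself).
from senlinclient.v1 import client as senlin_v1

# All keyword arguments are forwarded unchanged to sdk.create_connection().
sc = senlin_v1.Client(
    user_agent='example-app',
    auth_url='http://keystone.example.com:5000/v3',  # placeholder endpoint
    username='demo',
    password='secret',
    project_name='demo',
    user_domain_name='Default',
    project_domain_name='Default',
)

# Each wrapper is a one-line delegation to the SDK proxy, e.g. sc.profiles()
# ends up calling self.service.profiles() under the hood.
for profile in sc.profiles(limit=10):
    print(profile.name, profile.type)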
|
File diff suppressed because it is too large
@ -1,156 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Clustering v1 cluster policy action implementations"""
|
||||
|
||||
import logging
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils
|
||||
from oslo_utils import strutils
|
||||
|
||||
from senlinclient.common.i18n import _
|
||||
from senlinclient.common import utils as senlin_utils
|
||||
|
||||
|
||||
class ClusterPolicyList(command.Lister):
|
||||
"""List policies from cluster."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".ClusterPolicyList")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
        parser = super(ClusterPolicyList, self).get_parser(prog_name)
        parser.add_argument(
            '--filters',
            metavar='<"key1=value1;key2=value2...">',
            help=_("Filter parameters to apply on returned results. "
                   "This can be specified multiple times, or once with "
                   "parameters separated by a semicolon. The valid filter "
                   "keys are: ['is_enabled', 'policy_type', 'policy_name']"),
            action='append'
        )
        parser.add_argument(
            '--sort',
            metavar='<key>[:<direction>]',
            help=_("Sorting option which is a string containing a list of "
                   "keys separated by commas. Each key can be optionally "
                   "appended by a sort direction (:asc or :desc). The valid "
                   "sort keys are: ['enabled']")
        )
        parser.add_argument(
            '--full-id',
            default=False,
            action="store_true",
            help=_('Print full IDs in list')
        )
        parser.add_argument(
            'cluster',
            metavar='<cluster>',
            help=_('Name or ID of cluster to query on')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering

        columns = ['policy_id', 'policy_name', 'policy_type', 'is_enabled']
        cluster = senlin_client.get_cluster(parsed_args.cluster)
        queries = {
            'sort': parsed_args.sort,
        }

        if parsed_args.filters:
            queries.update(senlin_utils.format_parameters(parsed_args.filters))

        policies = senlin_client.cluster_policies(cluster.id, **queries)
        formatters = {}
        if not parsed_args.full_id:
            formatters = {
                'policy_id': lambda x: x[:8]
            }
        return (
            columns,
            (utils.get_item_properties(p, columns,
                                       formatters=formatters)
             for p in policies)
        )


class ClusterPolicyShow(command.ShowOne):
    """Show a specific policy that is bound to the specified cluster."""

    log = logging.getLogger(__name__ + ".ClusterPolicyShow")

    def get_parser(self, prog_name):
        parser = super(ClusterPolicyShow, self).get_parser(prog_name)
        parser.add_argument(
            '--policy',
            metavar='<policy>',
            required=True,
            help=_('ID or name of the policy to query on')
        )
        parser.add_argument(
            'cluster',
            metavar='<cluster>',
            help=_('ID or name of the cluster to query on')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering
        policy = senlin_client.get_cluster_policy(parsed_args.policy,
                                                  parsed_args.cluster)
        data = policy.to_dict()
        columns = sorted(data.keys())
        return columns, utils.get_dict_properties(data, columns)


class ClusterPolicyUpdate(command.Command):
    """Update a policy's properties on a cluster."""

    log = logging.getLogger(__name__ + ".ClusterPolicyUpdate")

    def get_parser(self, prog_name):
        parser = super(ClusterPolicyUpdate, self).get_parser(prog_name)
        parser.add_argument(
            '--policy',
            metavar='<policy>',
            required=True,
            help=_('ID or name of policy to be updated')
        )
        parser.add_argument(
            '--enabled',
            metavar='<boolean>',
            required=True,
            help=_('Whether the policy should be enabled')
        )
        parser.add_argument(
            'cluster',
            metavar='<cluster>',
            help=_('Name or ID of cluster to operate on')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering
        kwargs = {
            'enabled': strutils.bool_from_string(parsed_args.enabled,
                                                 strict=True),
        }

        resp = senlin_client.cluster_update_policy(parsed_args.cluster,
                                                   parsed_args.policy,
                                                   **kwargs)
        print('Request accepted by action: %s' % resp['action'])
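For reference, the OSC commands above are thin wrappers around the clustering SDK proxy exposed as self.app.client_manager.clustering. A minimal sketch of the equivalent direct SDK calls (not part of this diff; it assumes openstacksdk is installed and a cloud named 'devstack' is defined in clouds.yaml):

import openstack

conn = openstack.connect(cloud='devstack')
cluster = conn.clustering.get_cluster('my-cluster')
# Same query that ClusterPolicyList builds from --sort / --filters
for binding in conn.clustering.cluster_policies(cluster.id, sort='enabled'):
    print(binding.policy_id, binding.policy_name, binding.is_enabled)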
@ -1,138 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Clustering v1 event action implementations"""

import logging

from openstack import exceptions as sdk_exc
from osc_lib.command import command
from osc_lib import exceptions as exc
from osc_lib import utils

from senlinclient.common.i18n import _
from senlinclient.common import utils as senlin_utils


class ListEvent(command.Lister):
    """List events."""

    log = logging.getLogger(__name__ + ".ListEvent")

    def get_parser(self, prog_name):
        parser = super(ListEvent, self).get_parser(prog_name)
        parser.add_argument(
            '--filters',
            metavar='<"key1=value1;key2=value2...">',
            help=_("Filter parameters to apply on returned events. "
                   "This can be specified multiple times, or once with "
                   "parameters separated by a semicolon. The valid filter "
                   "keys are: ['level', 'otype', 'oid', 'cluster_id', "
                   "'oname', 'action']. "
                   "NOTICE: The value of 'oid' or 'cluster_id', "
                   "if provided, must be a full ID."),
            action='append'
        )
        parser.add_argument(
            '--limit',
            metavar='<limit>',
            help=_('Limit the number of events returned')
        )
        parser.add_argument(
            '--marker',
            metavar='<id>',
            help=_('Only return events that appear after the given event ID')
        )
        parser.add_argument(
            '--sort',
            metavar='<key>[:<direction>]',
            help=_("Sorting option which is a string containing a list of "
                   "keys separated by commas. Each key can be optionally "
                   "appended by a sort direction (:asc or :desc). The valid "
                   "sort keys are: ['timestamp', 'level', 'oid', 'otype', "
                   "'oname', 'action', 'status']")
        )
        parser.add_argument(
            '--global-project',
            default=False,
            action="store_true",
            help=_('Whether events from all projects should be listed. '
                   'Default is False. Setting this to True may require '
                   'admin privileges')
        )
        parser.add_argument(
            '--full-id',
            default=False,
            action="store_true",
            help=_('Print full IDs in list')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering
        columns = ['id', 'generated_at', 'obj_type', 'obj_id', 'obj_name',
                   'action', 'status', 'level', 'cluster_id']
        queries = {
            'sort': parsed_args.sort,
            'limit': parsed_args.limit,
            'marker': parsed_args.marker,
            'global_project': parsed_args.global_project,
        }

        if parsed_args.filters:
            queries.update(senlin_utils.format_parameters(parsed_args.filters))

        formatters = {}
        if parsed_args.global_project:
            columns.append('project_id')
        if not parsed_args.full_id:
            formatters['id'] = lambda x: x[:8]
            formatters['obj_id'] = lambda x: x[:8] if x else ''
            if 'project_id' in columns:
                formatters['project_id'] = lambda x: x[:8]
            formatters['cluster_id'] = lambda x: x[:8] if x else ''

        events = senlin_client.events(**queries)
        return (columns,
                (utils.get_item_properties(e, columns,
                                           formatters=formatters)
                 for e in events))


class ShowEvent(command.ShowOne):
    """Describe the event."""

    log = logging.getLogger(__name__ + ".ShowEvent")

    def get_parser(self, prog_name):
        parser = super(ShowEvent, self).get_parser(prog_name)
        parser.add_argument(
            'event',
            metavar='<event>',
            help=_('ID of event to display details for')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering
        try:
            event = senlin_client.get_event(parsed_args.event)
        except sdk_exc.ResourceNotFound:
            raise exc.CommandError(_("Event not found: %s")
                                   % parsed_args.event)
        data = event.to_dict()
        columns = sorted(data.keys())
        return columns, utils.get_dict_properties(data, columns)
@ -1,398 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Clustering v1 node action implementations"""

import logging
import sys

from openstack import exceptions as sdk_exc
from osc_lib.command import command
from osc_lib import exceptions as exc
from osc_lib import utils
from oslo_utils import strutils
import six

from senlinclient.common.i18n import _
from senlinclient.common import utils as senlin_utils


class ListNode(command.Lister):
    """Show list of nodes."""

    log = logging.getLogger(__name__ + ".ListNode")

    def get_parser(self, prog_name):
        parser = super(ListNode, self).get_parser(prog_name)
        parser.add_argument(
            '--cluster',
            metavar='<cluster>',
            help=_('ID or name of cluster from which nodes are to be listed')
        )
        parser.add_argument(
            '--filters',
            metavar='<"key1=value1;key2=value2...">',
            help=_("Filter parameters to apply on returned nodes. "
                   "This can be specified multiple times, or once with "
                   "parameters separated by a semicolon. The valid filter "
                   "keys are: ['status', 'name']"),
            action='append'
        )
        parser.add_argument(
            '--sort',
            metavar='<key>[:<direction>]',
            help=_("Sorting option which is a string containing a list of "
                   "keys separated by commas. Each key can be optionally "
                   "appended by a sort direction (:asc or :desc). The valid "
                   "sort keys are: ['index', 'name', 'status', 'init_at', "
                   "'created_at', 'updated_at']")
        )
        parser.add_argument(
            '--limit',
            metavar='<limit>',
            help=_('Limit the number of nodes returned')
        )
        parser.add_argument(
            '--marker',
            metavar='<id>',
            help=_('Only return nodes that appear after the given node ID')
        )
        parser.add_argument(
            '--global-project',
            default=False, action="store_true",
            help=_('Indicate that this node list should include nodes from '
                   'all projects. This option is subject to access policy '
                   'checking. Default is False')
        )
        parser.add_argument(
            '--full-id',
            default=False, action="store_true",
            help=_('Print full IDs in list')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering

        columns = ['id', 'name', 'index', 'status', 'cluster_id',
                   'physical_id', 'profile_name', 'created_at', 'updated_at']
        queries = {
            'cluster_id': parsed_args.cluster,
            'sort': parsed_args.sort,
            'limit': parsed_args.limit,
            'marker': parsed_args.marker,
            'global_project': parsed_args.global_project,
        }

        if parsed_args.filters:
            queries.update(senlin_utils.format_parameters(parsed_args.filters))

        nodes = senlin_client.nodes(**queries)
        if parsed_args.global_project:
            columns.append('project_id')
        if not parsed_args.full_id:
            formatters = {
                'id': lambda x: x[:8],
                'cluster_id': lambda x: x[:8] if x else '',
                'physical_id': lambda x: x[:8] if x else ''
            }
            if 'project_id' in columns:
                formatters['project_id'] = lambda x: x[:8]
        else:
            formatters = {}

        return (
            columns,
            (utils.get_item_properties(n, columns, formatters=formatters)
             for n in nodes)
        )


class ShowNode(command.ShowOne):
    """Show detailed info about the specified node."""

    log = logging.getLogger(__name__ + ".ShowNode")

    def get_parser(self, prog_name):
        parser = super(ShowNode, self).get_parser(prog_name)
        parser.add_argument(
            '--details',
            default=False,
            action="store_true",
            help=_('Include physical object details')
        )
        parser.add_argument(
            'node',
            metavar='<node>',
            help=_('Name or ID of the node to show the details for')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering
        return _show_node(senlin_client, parsed_args.node, parsed_args.details)


def _show_node(senlin_client, node_id, show_details=False):
    """Show detailed info about the specified node."""

    try:
        node = senlin_client.get_node(node_id, details=show_details)
    except sdk_exc.ResourceNotFound:
        raise exc.CommandError(_('Node not found: %s') % node_id)

    formatters = {
        'metadata': senlin_utils.json_formatter,
        'data': senlin_utils.json_formatter,
        'dependents': senlin_utils.json_formatter,
    }
    data = node.to_dict()
    if show_details and data['details']:
        formatters['details'] = senlin_utils.nested_dict_formatter(
            list(data['details'].keys()), ['property', 'value'])
    columns = sorted(data.keys())
    return columns, utils.get_dict_properties(data, columns,
                                              formatters=formatters)


class CreateNode(command.ShowOne):
    """Create the node."""

    log = logging.getLogger(__name__ + ".CreateNode")

    def get_parser(self, prog_name):
        parser = super(CreateNode, self).get_parser(prog_name)
        parser.add_argument(
            '--cluster',
            metavar='<cluster>',
            help=_('Cluster Id or Name for this node')
        )
        parser.add_argument(
            '--role',
            metavar='<role>',
            help=_('Role for this node in the specific cluster')
        )
        parser.add_argument(
            '--metadata',
            metavar='<"key1=value1;key2=value2...">',
            help=_('Metadata values to be attached to the node. '
                   'This can be specified multiple times, or once with '
                   'key-value pairs separated by a semicolon'),
            action='append'
        )
        parser.add_argument(
            '--profile',
            metavar='<profile>',
            required=True,
            help=_('Profile Id or Name used for this node')
        )
        parser.add_argument(
            'name',
            metavar='<node-name>',
            help=_('Name of the node to create')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering
        attrs = {
            'name': parsed_args.name,
            'cluster_id': parsed_args.cluster,
            'profile_id': parsed_args.profile,
            'role': parsed_args.role,
            'metadata': senlin_utils.format_parameters(parsed_args.metadata),
        }

        node = senlin_client.create_node(**attrs)
        return _show_node(senlin_client, node.id)


class UpdateNode(command.ShowOne):
    """Update the node."""

    log = logging.getLogger(__name__ + ".UpdateNode")

    def get_parser(self, prog_name):
        parser = super(UpdateNode, self).get_parser(prog_name)
        parser.add_argument(
            '--name',
            metavar='<name>',
            help=_('New name for the node')
        )
        parser.add_argument(
            '--profile',
            metavar='<profile>',
            help=_('ID or name of new profile to use')
        )
        parser.add_argument(
            '--role',
            metavar='<role>',
            help=_('Role for this node in the specific cluster')
        )
        parser.add_argument(
            '--metadata',
            metavar='<"key1=value1;key2=value2...">',
            help=_("Metadata values to be attached to the node. "
                   "This can be specified multiple times, or once with "
                   "key-value pairs separated by a semicolon. Use '{}' "
                   "to clear the metadata"),
            action='append'
        )
        parser.add_argument(
            'node',
            metavar='<node>',
            help=_('Name or ID of node to update')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering

        # Find the node first, we need its UUID
        node = senlin_client.find_node(parsed_args.node)
        if node is None:
            raise exc.CommandError(_('Node not found: %s') % parsed_args.node)

        attrs = {
            'name': parsed_args.name,
            'role': parsed_args.role,
            'profile_id': parsed_args.profile,
            'metadata': senlin_utils.format_parameters(parsed_args.metadata),
        }

        senlin_client.update_node(node.id, **attrs)
        return _show_node(senlin_client, node.id)


class DeleteNode(command.Command):
    """Delete the node(s)."""

    log = logging.getLogger(__name__ + ".DeleteNode")

    def get_parser(self, prog_name):
        parser = super(DeleteNode, self).get_parser(prog_name)
        parser.add_argument(
            'node',
            metavar='<node>',
            nargs='+',
            help=_('Name or ID of node(s) to delete')
        )
        parser.add_argument(
            '--force',
            action='store_true',
            help=_('Skip yes/no prompt (assume yes)')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering

        try:
            if not parsed_args.force and sys.stdin.isatty():
                sys.stdout.write(
                    _("Are you sure you want to delete this node(s)"
                      " [y/N]?"))
                prompt_response = sys.stdin.readline().lower()
                if not prompt_response.startswith('y'):
                    return
        except KeyboardInterrupt:  # Ctrl-c
            self.log.info('Ctrl-c detected.')
            return
        except EOFError:  # Ctrl-d
            self.log.info('Ctrl-d detected')
            return

        result = {}
        for nid in parsed_args.node:
            try:
                node = senlin_client.delete_node(nid, False)
                result[nid] = ('OK', node.location.split('/')[-1])
            except Exception as ex:
                result[nid] = ('ERROR', six.text_type(ex))

        for rid, res in result.items():
            senlin_utils.print_action_result(rid, res)


class CheckNode(command.Command):
    """Check the node(s)."""
    log = logging.getLogger(__name__ + ".CheckNode")

    def get_parser(self, prog_name):
        parser = super(CheckNode, self).get_parser(prog_name)
        parser.add_argument(
            'node',
            metavar='<node>',
            nargs='+',
            help=_('ID or name of node(s) to check.')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering
        for nid in parsed_args.node:
            try:
                resp = senlin_client.check_node(nid)
            except sdk_exc.ResourceNotFound:
                raise exc.CommandError(_('Node not found: %s') % nid)
            print('Node check request on node %(nid)s is accepted by '
                  'action %(action)s.'
                  % {'nid': nid, 'action': resp['action']})


class RecoverNode(command.Command):
    """Recover the node(s)."""
    log = logging.getLogger(__name__ + ".RecoverNode")

    def get_parser(self, prog_name):
        parser = super(RecoverNode, self).get_parser(prog_name)
        parser.add_argument(
            '--check',
            metavar='<boolean>',
            default=False,
            help=_('Whether the node(s) should check physical resource status '
                   'before doing node recover. Default is False')
        )
        parser.add_argument(
            'node',
            metavar='<node>',
            nargs='+',
            help=_('ID or name of node(s) to recover.')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering

        params = {
            'check': strutils.bool_from_string(parsed_args.check, strict=True)
        }

        for nid in parsed_args.node:
            try:
                resp = senlin_client.recover_node(nid, **params)
            except sdk_exc.ResourceNotFound:
                raise exc.CommandError(_('Node not found: %s') % nid)
            print('Node recover request on node %(nid)s is accepted by '
                  'action %(action)s.'
                  % {'nid': nid, 'action': resp['action']})
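As a side note, the node commands above map onto the clustering SDK proxy in the same way as the policy commands; a minimal sketch under the same assumptions (openstacksdk installed, a 'devstack' cloud configured, and 'my-node' being an existing node name):

import openstack

conn = openstack.connect(cloud='devstack')
# Roughly what ListNode does with --sort name:asc --limit 10
for node in conn.clustering.nodes(sort='name:asc', limit=10):
    print(node.id[:8], node.name, node.status)
# Roughly what ShowNode --details does via _show_node() above
node = conn.clustering.get_node('my-node', details=True)
print(sorted(node.to_dict().keys()))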
@ -1,311 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Clustering v1 policy action implementations"""

import logging
import sys

from openstack import exceptions as sdk_exc
from osc_lib.command import command
from osc_lib import exceptions as exc
from osc_lib import utils

from senlinclient.common.i18n import _
from senlinclient.common import utils as senlin_utils


class ListPolicy(command.Lister):
    """List policies that meet the criteria."""

    log = logging.getLogger(__name__ + ".ListPolicy")

    def get_parser(self, prog_name):
        parser = super(ListPolicy, self).get_parser(prog_name)
        parser.add_argument(
            '--limit',
            metavar='<limit>',
            help=_('Limit the number of policies returned')
        )
        parser.add_argument(
            '--marker',
            metavar='<id>',
            help=_('Only return policies that appear after the given policy '
                   'ID')
        )
        parser.add_argument(
            '--sort',
            metavar='<key>[:<direction>]',
            help=_("Sorting option which is a string containing a list of "
                   "keys separated by commas. Each key can be optionally "
                   "appended by a sort direction (:asc or :desc). The valid "
                   "sort keys are: ['type', 'name', 'created_at', "
                   "'updated_at']")
        )
        parser.add_argument(
            '--filters',
            metavar='<"key1=value1;key2=value2...">',
            help=_("Filter parameters to apply on returned policies. "
                   "This can be specified multiple times, or once with "
                   "parameters separated by a semicolon. The valid filter "
                   "keys are: ['type', 'name']"),
            action='append'
        )
        parser.add_argument(
            '--global-project',
            default=False,
            action="store_true",
            help=_('Indicate that the list should include policies from '
                   'all projects. This option is subject to access policy '
                   'checking. Default is False')
        )
        parser.add_argument(
            '--full-id',
            default=False,
            action="store_true",
            help=_('Print full IDs in list')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering
        columns = ['id', 'name', 'type', 'created_at']
        queries = {
            'limit': parsed_args.limit,
            'marker': parsed_args.marker,
            'sort': parsed_args.sort,
            'global_project': parsed_args.global_project,
        }
        if parsed_args.filters:
            queries.update(senlin_utils.format_parameters(parsed_args.filters))

        policies = senlin_client.policies(**queries)
        formatters = {}
        if parsed_args.global_project:
            columns.append('project_id')
        if not parsed_args.full_id:
            formatters = {
                'id': lambda x: x[:8]
            }
            if 'project_id' in columns:
                formatters['project_id'] = lambda x: x[:8]

        return (
            columns,
            (utils.get_item_properties(p, columns, formatters=formatters)
             for p in policies)
        )


class ShowPolicy(command.ShowOne):
    """Show the policy details."""

    log = logging.getLogger(__name__ + ".ShowPolicy")

    def get_parser(self, prog_name):
        parser = super(ShowPolicy, self).get_parser(prog_name)
        parser.add_argument(
            'policy',
            metavar='<policy>',
            help=_('Name or Id of the policy to show')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering
        return _show_policy(senlin_client, policy_id=parsed_args.policy)


def _show_policy(senlin_client, policy_id):
    try:
        policy = senlin_client.get_policy(policy_id)
    except sdk_exc.ResourceNotFound:
        raise exc.CommandError(_('Policy not found: %s') % policy_id)

    formatters = {
        'spec': senlin_utils.json_formatter
    }

    data = policy.to_dict()
    columns = sorted(data.keys())
    return columns, utils.get_dict_properties(data, columns,
                                              formatters=formatters)


class CreatePolicy(command.ShowOne):
    """Create a policy."""

    log = logging.getLogger(__name__ + ".CreatePolicy")

    def get_parser(self, prog_name):
        parser = super(CreatePolicy, self).get_parser(prog_name)
        parser.add_argument(
            '--spec-file',
            metavar='<spec-file>',
            required=True,
            help=_('The spec file used to create the policy')
        )
        parser.add_argument(
            'name',
            metavar='<name>',
            help=_('Name of the policy to create')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering
        spec = senlin_utils.get_spec_content(parsed_args.spec_file)
        attrs = {
            'name': parsed_args.name,
            'spec': spec,
        }

        policy = senlin_client.create_policy(**attrs)
        return _show_policy(senlin_client, policy.id)


class UpdatePolicy(command.ShowOne):
    """Update a policy."""

    log = logging.getLogger(__name__ + ".UpdatePolicy")

    def get_parser(self, prog_name):
        parser = super(UpdatePolicy, self).get_parser(prog_name)
        parser.add_argument(
            '--name',
            metavar='<name>',
            help=_('New name of the policy to be updated')
        )
        parser.add_argument(
            'policy',
            metavar='<policy>',
            help=_('Name or ID of the policy to be updated')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering
        params = {
            'name': parsed_args.name,
        }
        policy = senlin_client.find_policy(parsed_args.policy)
        if policy is None:
            raise exc.CommandError(_('Policy not found: %s') %
                                   parsed_args.policy)
        senlin_client.update_policy(policy.id, **params)
        return _show_policy(senlin_client, policy_id=policy.id)


class DeletePolicy(command.Command):
    """Delete policy(s)."""

    log = logging.getLogger(__name__ + ".DeletePolicy")

    def get_parser(self, prog_name):
        parser = super(DeletePolicy, self).get_parser(prog_name)
        parser.add_argument(
            'policy',
            metavar='<policy>',
            nargs='+',
            help=_('Name or ID of policy(s) to delete')
        )
        parser.add_argument(
            '--force',
            action='store_true',
            help=_('Skip yes/no prompt (assume yes)')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering
        try:
            if not parsed_args.force and sys.stdin.isatty():
                sys.stdout.write(
                    _("Are you sure you want to delete this policy(s)"
                      " [y/N]?"))
                prompt_response = sys.stdin.readline().lower()
                if not prompt_response.startswith('y'):
                    return
        except KeyboardInterrupt:  # Ctrl-c
            self.log.info('Ctrl-c detected.')
            return
        except EOFError:  # Ctrl-d
            self.log.info('Ctrl-d detected')
            return

        failure_count = 0

        for pid in parsed_args.policy:
            try:
                senlin_client.delete_policy(pid, False)
            except Exception as ex:
                failure_count += 1
                print(ex)
        if failure_count:
            raise exc.CommandError(_('Failed to delete %(count)s of the '
                                     '%(total)s specified policy(s).') %
                                   {'count': failure_count,
                                    'total': len(parsed_args.policy)})
        print('Policy deleted: %s' % parsed_args.policy)


class ValidatePolicy(command.ShowOne):
    """Validate a policy."""

    log = logging.getLogger(__name__ + ".ValidatePolicy")

    def get_parser(self, prog_name):
        parser = super(ValidatePolicy, self).get_parser(prog_name)
        parser.add_argument(
            '--spec-file',
            metavar='<spec-file>',
            required=True,
            help=_('The spec file of the policy to be validated')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering
        spec = senlin_utils.get_spec_content(parsed_args.spec_file)
        attrs = {
            'spec': spec,
        }

        policy = senlin_client.validate_policy(**attrs)
        formatters = {
            'spec': senlin_utils.json_formatter
        }
        columns = [
            'created_at',
            'data',
            'domain',
            'id',
            'name',
            'project',
            'spec',
            'type',
            'updated_at',
            'user'
        ]
        return columns, utils.get_dict_properties(policy.to_dict(), columns,
                                                  formatters=formatters)
@ -1,78 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Clustering v1 policy type action implementations"""

import logging

from openstack import exceptions as sdk_exc
from osc_lib.command import command
from osc_lib import exceptions as exc

from senlinclient.common import format_utils
from senlinclient.common.i18n import _


class PolicyTypeList(command.Lister):
    """List the available policy types."""

    log = logging.getLogger(__name__ + ".PolicyTypeList")

    def get_parser(self, prog_name):
        parser = super(PolicyTypeList, self).get_parser(prog_name)
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering
        types = senlin_client.policy_types()
        columns = ['name', 'version', 'support_status']
        results = []
        for t in types:
            for v in t.support_status.keys():
                st_list = '\n'.join([
                    ' since '.join((item['status'], item['since']))
                    for item in t.support_status[v]
                ])

                results.append((t.name, v, st_list))

        return columns, sorted(results)


class PolicyTypeShow(format_utils.YamlFormat):
    """Get the details about a policy type."""

    log = logging.getLogger(__name__ + ".PolicyTypeShow")

    def get_parser(self, prog_name):
        parser = super(PolicyTypeShow, self).get_parser(prog_name)
        parser.add_argument(
            'type_name',
            metavar='<type-name>',
            help=_('Policy type to retrieve')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering

        try:
            res = senlin_client.get_policy_type(parsed_args.type_name)
        except sdk_exc.ResourceNotFound:
            raise exc.CommandError(_('Policy Type not found: %s')
                                   % parsed_args.type_name)
        data = res.to_dict()
        rows = data.values()
        columns = data.keys()
        return columns, rows
@ -1,368 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Clustering v1 profile action implementations"""

import logging
import sys

from openstack import exceptions as sdk_exc
from osc_lib.command import command
from osc_lib import exceptions as exc
from osc_lib import utils

from senlinclient.common.i18n import _
from senlinclient.common import utils as senlin_utils


class ShowProfile(command.ShowOne):
    """Show profile details."""

    log = logging.getLogger(__name__ + ".ShowProfile")

    def get_parser(self, prog_name):
        parser = super(ShowProfile, self).get_parser(prog_name)
        parser.add_argument(
            'profile',
            metavar='<profile>',
            help='Name or ID of profile to show',
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering
        return _show_profile(senlin_client, profile_id=parsed_args.profile)


def _show_profile(senlin_client, profile_id):
    try:
        data = senlin_client.get_profile(profile_id)
    except sdk_exc.ResourceNotFound:
        raise exc.CommandError('Profile not found: %s' % profile_id)
    else:
        formatters = {}
        formatters['metadata'] = senlin_utils.json_formatter
        formatters['spec'] = senlin_utils.nested_dict_formatter(
            ['type', 'version', 'properties'],
            ['property', 'value'])

        data = data.to_dict()
        columns = sorted(data.keys())
        return columns, utils.get_dict_properties(data, columns,
                                                  formatters=formatters)


class ListProfile(command.Lister):
    """List profiles that meet the criteria."""

    log = logging.getLogger(__name__ + ".ListProfile")

    def get_parser(self, prog_name):
        parser = super(ListProfile, self).get_parser(prog_name)
        parser.add_argument(
            '--limit',
            metavar='<limit>',
            help=_('Limit the number of profiles returned')
        )
        parser.add_argument(
            '--marker',
            metavar='<id>',
            help=_('Only return profiles that appear after the given profile '
                   'ID')
        )
        parser.add_argument(
            '--sort',
            metavar='<key>[:<direction>]',
            help=_("Sorting option which is a string containing a list of "
                   "keys separated by commas. Each key can be optionally "
                   "appended by a sort direction (:asc or :desc). The valid "
                   "sort keys are: ['type', 'name', 'created_at', "
                   "'updated_at']")
        )
        parser.add_argument(
            '--filters',
            metavar='<"key1=value1;key2=value2...">',
            help=_("Filter parameters to apply on returned profiles. "
                   "This can be specified multiple times, or once with "
                   "parameters separated by a semicolon. The valid filter "
                   "keys are: ['type', 'name']"),
            action='append'
        )
        parser.add_argument(
            '--global-project',
            default=False,
            action="store_true",
            help=_('Indicate that the list should include profiles from '
                   'all projects. This option is subject to access policy '
                   'checking. Default is False')
        )
        parser.add_argument(
            '--full-id',
            default=False,
            action="store_true",
            help=_('Print full IDs in list')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering

        columns = ['id', 'name', 'type', 'created_at']
        queries = {
            'limit': parsed_args.limit,
            'marker': parsed_args.marker,
            'sort': parsed_args.sort,
            'global_project': parsed_args.global_project,
        }
        if parsed_args.filters:
            queries.update(senlin_utils.format_parameters(parsed_args.filters))
        data = senlin_client.profiles(**queries)

        formatters = {}
        if parsed_args.global_project:
            columns.append('project_id')
        if not parsed_args.full_id:
            formatters = {
                'id': lambda x: x[:8],
            }
            if 'project_id' in columns:
                formatters['project_id'] = lambda x: x[:8]

        return (
            columns,
            (utils.get_item_properties(p, columns, formatters=formatters)
             for p in data)
        )


class DeleteProfile(command.Command):
    """Delete profile(s)."""

    log = logging.getLogger(__name__ + ".DeleteProfile")

    def get_parser(self, prog_name):
        parser = super(DeleteProfile, self).get_parser(prog_name)
        parser.add_argument(
            'profile',
            metavar='<profile>',
            nargs='+',
            help=_('Name or ID of profile(s) to delete')
        )
        parser.add_argument(
            '--force',
            action='store_true',
            help=_('Skip yes/no prompt (assume yes)')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering

        try:
            if not parsed_args.force and sys.stdin.isatty():
                sys.stdout.write(
                    _("Are you sure you want to delete this profile(s)"
                      " [y/N]?"))
                prompt_response = sys.stdin.readline().lower()
                if not prompt_response.startswith('y'):
                    return
        except KeyboardInterrupt:  # Ctrl-c
            self.log.info('Ctrl-c detected.')
            return
        except EOFError:  # Ctrl-d
            self.log.info('Ctrl-d detected')
            return

        failure_count = 0
        for pid in parsed_args.profile:
            try:
                senlin_client.delete_profile(pid, False)
            except Exception as ex:
                failure_count += 1
                print(ex)
        if failure_count:
            raise exc.CommandError(_('Failed to delete %(count)s of the '
                                     '%(total)s specified profile(s).') %
                                   {'count': failure_count,
                                    'total': len(parsed_args.profile)})
        print('Profile deleted: %s' % parsed_args.profile)


class CreateProfile(command.ShowOne):
    """Create a profile."""

    log = logging.getLogger(__name__ + ".CreateProfile")

    def get_parser(self, prog_name):
        parser = super(CreateProfile, self).get_parser(prog_name)
        parser.add_argument(
            '--metadata',
            metavar='<"key1=value1;key2=value2...">',
            help=_('Metadata values to be attached to the profile. '
                   'This can be specified multiple times, or once with '
                   'key-value pairs separated by a semicolon'),
            action='append'
        )
        parser.add_argument(
            '--spec-file',
            metavar='<spec-file>',
            required=True,
            help=_('The spec file used to create the profile')
        )
        parser.add_argument(
            'name',
            metavar='<profile-name>',
            help=_('Name of the profile to create')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering

        spec = senlin_utils.get_spec_content(parsed_args.spec_file)
        type_name = spec.get('type', None)
        type_version = spec.get('version', None)
        properties = spec.get('properties', None)
        if type_name is None:
            raise exc.CommandError(_("Missing 'type' key in spec file."))
        if type_version is None:
            raise exc.CommandError(_("Missing 'version' key in spec file."))
        if properties is None:
            raise exc.CommandError(_("Missing 'properties' key in spec file."))

        if type_name == 'os.heat.stack':
            stack_properties = senlin_utils.process_stack_spec(properties)
            spec['properties'] = stack_properties

        params = {
            'name': parsed_args.name,
            'spec': spec,
            'metadata': senlin_utils.format_parameters(parsed_args.metadata),
        }

        profile = senlin_client.create_profile(**params)
        return _show_profile(senlin_client, profile_id=profile.id)


class UpdateProfile(command.ShowOne):
    """Update a profile."""

    log = logging.getLogger(__name__ + ".UpdateProfile")

    def get_parser(self, prog_name):
        parser = super(UpdateProfile, self).get_parser(prog_name)
        parser.add_argument(
            '--name',
            metavar='<name>',
            help=_('The new name for the profile')
        )
        parser.add_argument(
            '--metadata',
            metavar='<"key1=value1;key2=value2...">',
            help=_("Metadata values to be attached to the profile. "
                   "This can be specified multiple times, or once with "
                   "key-value pairs separated by a semicolon. Use '{}' "
                   "to clear the metadata"),
            action='append'
        )
        parser.add_argument(
            'profile',
            metavar='<profile>',
            help=_('Name or ID of the profile to update')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering

        params = {
            'name': parsed_args.name,
        }
        if parsed_args.metadata:
            params['metadata'] = senlin_utils.format_parameters(
                parsed_args.metadata)

        # Find the profile first, we need its id
        profile = senlin_client.find_profile(parsed_args.profile)
        if profile is None:
            raise exc.CommandError(_('Profile not found: %s') %
                                   parsed_args.profile)
        senlin_client.update_profile(profile.id, **params)
        return _show_profile(senlin_client, profile_id=profile.id)


class ValidateProfile(command.ShowOne):
    """Validate a profile."""

    log = logging.getLogger(__name__ + ".ValidateProfile")

    def get_parser(self, prog_name):
        parser = super(ValidateProfile, self).get_parser(prog_name)
        parser.add_argument(
            '--spec-file',
            metavar='<spec-file>',
            required=True,
            help=_('The spec file of the profile to be validated')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering

        spec = senlin_utils.get_spec_content(parsed_args.spec_file)
        type_name = spec.get('type', None)
        type_version = spec.get('version', None)
        properties = spec.get('properties', None)
        if type_name is None:
            raise exc.CommandError(_("Missing 'type' key in spec file."))
        if type_version is None:
            raise exc.CommandError(_("Missing 'version' key in spec file."))
        if properties is None:
            raise exc.CommandError(_("Missing 'properties' key in spec file."))

        if type_name == 'os.heat.stack':
            stack_properties = senlin_utils.process_stack_spec(properties)
            spec['properties'] = stack_properties

        params = {
            'spec': spec,
        }

        profile = senlin_client.validate_profile(**params)

        formatters = {}
        formatters['metadata'] = senlin_utils.json_formatter
        formatters['spec'] = senlin_utils.nested_dict_formatter(
            ['type', 'version', 'properties'],
            ['property', 'value'])

        columns = [
            'created_at',
            'domain',
            'id',
            'metadata',
            'name',
            'project_id',
            'spec',
            'type',
            'updated_at',
            'user_id'
        ]
        return columns, utils.get_dict_properties(profile.to_dict(), columns,
                                                  formatters=formatters)
@ -1,79 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Clustering v1 profile type action implementations"""

import logging

from openstack import exceptions as sdk_exc
from osc_lib.command import command
from osc_lib import exceptions as exc

from senlinclient.common import format_utils
from senlinclient.common.i18n import _


class ProfileTypeList(command.Lister):
    """List the available profile types."""

    log = logging.getLogger(__name__ + ".ProfileTypeList")

    def get_parser(self, prog_name):
        parser = super(ProfileTypeList, self).get_parser(prog_name)
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering
        types = senlin_client.profile_types()
        columns = ['name', 'version', 'support_status']

        results = []
        for t in types:
            for v in t.support_status.keys():
                st_list = '\n'.join([
                    ' since '.join((item['status'], item['since']))
                    for item in t.support_status[v]
                ])

                results.append((t.name, v, st_list))

        return columns, sorted(results)


class ProfileTypeShow(format_utils.YamlFormat):
    """Show the details about a profile type."""

    log = logging.getLogger(__name__ + ".ProfileTypeShow")

    def get_parser(self, prog_name):
        parser = super(ProfileTypeShow, self).get_parser(prog_name)
        parser.add_argument(
            'type_name',
            metavar='<type-name>',
            help=_('Profile type to retrieve')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering

        try:
            res = senlin_client.get_profile_type(parsed_args.type_name)
        except sdk_exc.ResourceNotFound:
            raise exc.CommandError(_('Profile Type not found: %s')
                                   % parsed_args.type_name)
        data = res.to_dict()
        rows = data.values()
        columns = data.keys()
        return columns, rows
@ -1,319 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Clustering v1 receiver action implementations"""
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from openstack import exceptions as sdk_exc
|
||||
from osc_lib.command import command
|
||||
from osc_lib import exceptions as exc
|
||||
from osc_lib import utils
|
||||
|
||||
from senlinclient.common.i18n import _
|
||||
from senlinclient.common import utils as senlin_utils
|
||||
|
||||
|
||||
class ListReceiver(command.Lister):
|
||||
"""List receivers that meet the criteria."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".ListReceiver")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(ListReceiver, self).get_parser(prog_name)
|
||||
parser.add_argument(
|
||||
'--filters',
|
||||
metavar='<"key1=value1;key2=value2...">',
|
||||
help=_("Filter parameters to apply on returned receivers. "
|
||||
"This can be specified multiple times, or once with "
|
||||
"parameters separated by a semicolon. The valid filter "
|
||||
"keys are: ['name', 'type', 'action', 'cluster_id', "
|
||||
"'user_id']"),
|
||||
action='append'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--limit',
|
||||
metavar='<limit>',
|
||||
help=_('Limit the number of receivers returned')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--marker',
|
||||
metavar='<id>',
|
||||
help=_('Only return receivers that appear after the given '
|
||||
'receiver ID')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--sort',
|
||||
metavar='<key>[:<direction>]',
|
||||
help=_("Sorting option which is a string containing a list of "
|
||||
"keys separated by commas. Each key can be optionally "
|
||||
"appended by a sort direction (:asc or :desc). The valid "
|
||||
"sort keys are: ['name', 'type', 'action', 'cluster_id', "
|
||||
"'created_at']")
|
||||
)
|
||||
parser.add_argument(
|
||||
'--global-project',
|
||||
default=False,
|
||||
action="store_true",
|
||||
help=_('Indicate that the list should include receivers from'
|
||||
' all projects. This option is subject to access policy '
|
||||
'checking. Default is False')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--full-id',
|
||||
default=False,
|
||||
action="store_true",
|
||||
help=_('Print full IDs in list')
|
||||
)
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
senlin_client = self.app.client_manager.clustering
|
||||
|
||||
columns = ['id', 'name', 'type', 'cluster_id', 'action', 'created_at']
|
||||
queries = {
|
||||
'limit': parsed_args.limit,
|
||||
'marker': parsed_args.marker,
|
||||
'sort': parsed_args.sort,
|
||||
'global_project': parsed_args.global_project,
|
||||
}
|
||||
|
||||
if parsed_args.filters:
|
||||
queries.update(senlin_utils.format_parameters(parsed_args.filters))
|
||||
|
||||
receivers = senlin_client.receivers(**queries)
|
||||
formatters = {}
|
||||
if parsed_args.global_project:
|
||||
columns.append('project_id')
|
||||
columns.append('user_id')
|
||||
if not parsed_args.full_id:
|
||||
formatters = {
|
||||
'id': lambda x: x[:8],
|
||||
'cluster_id': lambda x: x[:8] if x else None,
|
||||
}
|
||||
if 'project_id' in columns:
|
||||
formatters['project_id'] = lambda x: x[:8]
|
||||
formatters['user_id'] = lambda x: x[:8]
|
||||
|
||||
return (
|
||||
columns,
|
||||
(utils.get_item_properties(r, columns, formatters=formatters)
|
||||
for r in receivers)
|
||||
)
|
||||
|
||||
|
||||
class ShowReceiver(command.ShowOne):
|
||||
"""Show the receiver details."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".ShowReceiver")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(ShowReceiver, self).get_parser(prog_name)
|
||||
parser.add_argument(
|
||||
'receiver',
|
||||
metavar='<receiver>',
|
||||
help=_('Name or ID of the receiver to show')
|
||||
)
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
senlin_client = self.app.client_manager.clustering
|
||||
return _show_receiver(senlin_client, parsed_args.receiver)
|
||||
|
||||
|
||||
def _show_receiver(senlin_client, receiver_id):
|
||||
try:
|
||||
receiver = senlin_client.get_receiver(receiver_id)
|
||||
except sdk_exc.ResourceNotFound:
|
||||
raise exc.CommandError(_('Receiver not found: %s') % receiver_id)
|
||||
|
||||
formatters = {
|
||||
'actor': senlin_utils.json_formatter,
|
||||
'params': senlin_utils.json_formatter,
|
||||
'channel': senlin_utils.json_formatter,
|
||||
}
|
||||
data = receiver.to_dict()
|
||||
columns = sorted(data.keys())
|
||||
return columns, utils.get_dict_properties(data, columns,
|
||||
formatters=formatters)
|
||||
|
||||
|
||||
class CreateReceiver(command.ShowOne):
|
||||
"""Create a receiver."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".CreateReceiver")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(CreateReceiver, self).get_parser(prog_name)
|
||||
parser.add_argument(
|
||||
'--type',
|
||||
metavar='<type>',
|
||||
default='webhook',
|
||||
help=_('Type of the receiver to create. Receiver type can be '
|
||||
'"webhook" or "message". Default to "webhook".')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--params',
|
||||
metavar='<"key1=value1;key2=value2...">',
|
||||
help=_('A dictionary of parameters that will be passed to target '
|
||||
'action when the receiver is triggered'),
|
||||
action='append'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--cluster',
|
||||
metavar='<cluster>',
|
||||
help=_('Targeted cluster for this receiver. Required if '
|
||||
'receiver type is webhook')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--action',
|
||||
metavar='<action>',
|
||||
help=_('Name or ID of the targeted action to be triggered. '
|
||||
'Required if receiver type is webhook')
|
||||
)
|
||||
parser.add_argument(
|
||||
'name',
|
||||
metavar='<name>',
|
||||
help=_('Name of the receiver to create')
|
||||
)
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
if parsed_args.type == 'webhook':
|
||||
if (not parsed_args.cluster or not parsed_args.action):
|
||||
msg = _('cluster and action parameters are required to create '
|
||||
'webhook type of receiver.')
|
||||
raise exc.CommandError(msg)
|
||||
|
||||
senlin_client = self.app.client_manager.clustering
|
||||
params = {
|
||||
'name': parsed_args.name,
|
||||
'type': parsed_args.type,
|
||||
'cluster_id': parsed_args.cluster,
|
||||
'action': parsed_args.action,
|
||||
'params': senlin_utils.format_parameters(parsed_args.params)
|
||||
}
|
||||
|
||||
receiver = senlin_client.create_receiver(**params)
|
||||
return _show_receiver(senlin_client, receiver.id)
|
||||
|
||||
|
||||
class UpdateReceiver(command.ShowOne):
|
||||
"""Create a receiver."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".UpdateReceiver")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(UpdateReceiver, self).get_parser(prog_name)
|
||||
|
||||
parser.add_argument(
|
||||
'--name',
|
||||
metavar='<name>',
|
||||
help=_('Name of the receiver to create')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--action',
|
||||
metavar='<action>',
|
||||
help=_('Name or ID of the targeted action to be triggered. '
|
||||
'Required if receiver type is webhook')
|
||||
)
|
||||
parser.add_argument(
|
||||
'--params',
|
||||
metavar='<"key1=value1;key2=value2...">',
|
||||
help=_('A dictionary of parameters that will be passed to target '
|
||||
'action when the receiver is triggered'),
|
||||
action='append'
|
||||
)
|
||||
parser.add_argument(
|
||||
'receiver',
|
||||
metavar='<receiver>',
|
||||
help=_('Name or ID of receiver(s) to update')
|
||||
)
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
senlin_client = self.app.client_manager.clustering
|
||||
params = {
|
||||
'name': parsed_args.name,
|
||||
'action': parsed_args.action,
|
||||
'params': senlin_utils.format_parameters(parsed_args.params)
|
||||
}
|
||||
|
||||
receiver = senlin_client.find_receiver(parsed_args.receiver)
|
||||
if receiver is None:
|
||||
raise exc.CommandError(_('Receiver not found: %s') %
|
||||
parsed_args.receiver)
|
||||
senlin_client.update_receiver(receiver.id, **params)
|
||||
return _show_receiver(senlin_client, receiver_id=receiver.id)
|
||||
|
||||
|
||||
class DeleteReceiver(command.Command):
    """Delete receiver(s)."""

    log = logging.getLogger(__name__ + ".DeleteReceiver")

    def get_parser(self, prog_name):
        parser = super(DeleteReceiver, self).get_parser(prog_name)
        parser.add_argument(
            'receiver',
            metavar='<receiver>',
            nargs='+',
            help=_('Name or ID of receiver(s) to delete')
        )
        parser.add_argument(
            '--force',
            action='store_true',
            help=_('Skip yes/no prompt (assume yes)')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        senlin_client = self.app.client_manager.clustering

        try:
            if not parsed_args.force and sys.stdin.isatty():
                sys.stdout.write(
                    _("Are you sure you want to delete the specified "
                      "receiver(s) [y/N]?"))
                prompt_response = sys.stdin.readline().lower()
                if not prompt_response.startswith('y'):
                    return
        except KeyboardInterrupt:  # Ctrl-c
            self.log.info('Ctrl-c detected.')
            return
        except EOFError:  # Ctrl-d
            self.log.info('Ctrl-d detected.')
            return

        failure_count = 0

        for rid in parsed_args.receiver:
            try:
                senlin_client.delete_receiver(rid, False)
            except Exception as ex:
                failure_count += 1
                print(ex)
        if failure_count:
            raise exc.CommandError(_('Failed to delete %(count)s of the '
                                     '%(total)s specified receiver(s).') %
                                   {'count': failure_count,
                                    'total': len(parsed_args.receiver)})
        print('Receiver deleted: %s' % parsed_args.receiver)

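Deletion above is best-effort across the whole argument list: failures are counted and reported at the end instead of aborting on the first error. The same pattern as a standalone helper, assuming a conn object as in the earlier sketches; the False mirrors the second argument take_action() passes to delete_receiver, so a missing receiver counts as a failure:

    def delete_receivers(conn, names_or_ids):
        """Delete each receiver; return how many deletions failed."""
        failures = 0
        for rid in names_or_ids:
            try:
                conn.clustering.delete_receiver(rid, False)
            except Exception as ex:
                failures += 1
                print(ex)
        return failures
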
@ -1,43 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import logging

from osc_lib.command import command
from osc_lib import utils


class ListService(command.Lister):
    """Show a list of all running services."""

    log = logging.getLogger(__name__ + ".ListService")

    def get_parser(self, prog_name):
        parser = super(ListService, self).get_parser(prog_name)
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)

        senlin_client = self.app.client_manager.clustering
        columns = ['binary', 'host', 'status', 'state', 'updated_at',
                   'disabled_reason']

        queries = {}
        result = senlin_client.services(**queries)

        formatters = {}
        return (
            columns,
            (utils.get_item_properties(s, columns, formatters=formatters)
             for s in result)
        )

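ListService simply queries the clustering services endpoint and lets osc_lib's get_item_properties pull the listed columns off each result. A quick sketch of the same listing with openstacksdk alone, assuming the resource attributes match the columns selected above:

    import openstack

    conn = openstack.connect(cloud='mycloud')  # placeholder cloud name
    for svc in conn.clustering.services():
        # Same fields the Lister above selects as columns.
        print(svc.binary, svc.host, svc.status, svc.state,
              svc.updated_at, svc.disabled_reason)
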
117
setup.cfg
@ -1,117 +0,0 @@
[metadata]
name = python-senlinclient
summary = OpenStack Clustering API Client Library
description-file =
    README.rst
author = OpenStack
author-email = openstack-dev@lists.openstack.org
home-page = http://docs.openstack.org/developer/senlin/
classifier =
    Environment :: OpenStack
    Intended Audience :: Information Technology
    Intended Audience :: System Administrators
    License :: OSI Approved :: Apache Software License
    Operating System :: POSIX :: Linux
    Programming Language :: Python
    Programming Language :: Python :: 2
    Programming Language :: Python :: 2.7
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3.5

[files]
packages =
    senlinclient

[entry_points]
console_scripts =
    senlin = senlinclient.shell:main

openstack.cli.extension =
    clustering = senlinclient.plugin

openstack.clustering.v1 =
    cluster_action_list = senlinclient.v1.action:ListAction
    cluster_action_show = senlinclient.v1.action:ShowAction
    cluster_build_info = senlinclient.v1.build_info:BuildInfo
    cluster_check = senlinclient.v1.cluster:CheckCluster
    cluster_create = senlinclient.v1.cluster:CreateCluster
    cluster_delete = senlinclient.v1.cluster:DeleteCluster
    cluster_event_list = senlinclient.v1.event:ListEvent
    cluster_event_show = senlinclient.v1.event:ShowEvent
    cluster_list = senlinclient.v1.cluster:ListCluster
    cluster_members_list = senlinclient.v1.cluster:ClusterNodeList
    cluster_members_add = senlinclient.v1.cluster:ClusterNodeAdd
    cluster_members_del = senlinclient.v1.cluster:ClusterNodeDel
    cluster_members_replace = senlinclient.v1.cluster:ClusterNodeReplace
    cluster_node_check = senlinclient.v1.node:CheckNode
    cluster_node_create = senlinclient.v1.node:CreateNode
    cluster_node_delete = senlinclient.v1.node:DeleteNode
    cluster_node_list = senlinclient.v1.node:ListNode
    cluster_node_recover = senlinclient.v1.node:RecoverNode
    cluster_node_show = senlinclient.v1.node:ShowNode
    cluster_node_update = senlinclient.v1.node:UpdateNode
    cluster_policy_attach = senlinclient.v1.cluster:ClusterPolicyAttach
    cluster_policy_binding_list = senlinclient.v1.cluster_policy:ClusterPolicyList
    cluster_policy_binding_show = senlinclient.v1.cluster_policy:ClusterPolicyShow
    cluster_policy_binding_update = senlinclient.v1.cluster_policy:ClusterPolicyUpdate
    cluster_policy_create = senlinclient.v1.policy:CreatePolicy
    cluster_policy_delete = senlinclient.v1.policy:DeletePolicy
    cluster_policy_detach = senlinclient.v1.cluster:ClusterPolicyDetach
    cluster_policy_list = senlinclient.v1.policy:ListPolicy
    cluster_policy_show = senlinclient.v1.policy:ShowPolicy
    cluster_policy_validate = senlinclient.v1.policy:ValidatePolicy
    cluster_policy_type_list = senlinclient.v1.policy_type:PolicyTypeList
    cluster_policy_type_show = senlinclient.v1.policy_type:PolicyTypeShow
    cluster_policy_update = senlinclient.v1.policy:UpdatePolicy
    cluster_profile_create = senlinclient.v1.profile:CreateProfile
    cluster_profile_delete = senlinclient.v1.profile:DeleteProfile
    cluster_profile_list = senlinclient.v1.profile:ListProfile
    cluster_profile_show = senlinclient.v1.profile:ShowProfile
    cluster_profile_type_list = senlinclient.v1.profile_type:ProfileTypeList
    cluster_profile_type_show = senlinclient.v1.profile_type:ProfileTypeShow
    cluster_profile_update = senlinclient.v1.profile:UpdateProfile
    cluster_profile_validate = senlinclient.v1.profile:ValidateProfile
    cluster_receiver_create = senlinclient.v1.receiver:CreateReceiver
    cluster_receiver_update = senlinclient.v1.receiver:UpdateReceiver
    cluster_receiver_delete = senlinclient.v1.receiver:DeleteReceiver
    cluster_receiver_list = senlinclient.v1.receiver:ListReceiver
    cluster_receiver_show = senlinclient.v1.receiver:ShowReceiver
    cluster_recover = senlinclient.v1.cluster:RecoverCluster
    cluster_resize = senlinclient.v1.cluster:ResizeCluster
    cluster_shrink = senlinclient.v1.cluster:ScaleInCluster
    cluster_expand = senlinclient.v1.cluster:ScaleOutCluster
    cluster_show = senlinclient.v1.cluster:ShowCluster
    cluster_update = senlinclient.v1.cluster:UpdateCluster
    cluster_collect = senlinclient.v1.cluster:ClusterCollect
    cluster_run = senlinclient.v1.cluster:ClusterRun
    cluster_service_list = senlinclient.v1.service:ListService

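Each key under openstack.clustering.v1 is an OSC command name: cliff replaces the underscores with spaces, so cluster_receiver_create surfaces as "openstack cluster receiver create" backed by senlinclient.v1.receiver:CreateReceiver. A small sketch of resolving one of these entry points the way the plugin machinery does, using stevedore; it assumes python-senlinclient is installed so the entry point is registered:

    from stevedore import driver

    # Look up the command class registered as cluster_receiver_create above.
    mgr = driver.DriverManager(
        namespace='openstack.clustering.v1',
        name='cluster_receiver_create',
        invoke_on_load=False,
    )
    print(mgr.driver)  # expected: senlinclient.v1.receiver.CreateReceiver
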
[global]
setup-hooks =
    pbr.hooks.setup_hook

[build_sphinx]
source-dir = doc/source
build-dir = doc/build
all_files = 1
warning-is-error = 1

[upload_sphinx]
upload-dir = doc/build/html

[wheel]
universal = 1

[extract_messages]
keywords = _ gettext ngettext l_ lazy_gettext
mapping_file = babel.cfg
output_file = senlinclient/locale/senlinclient.pot

[compile_catalog]
directory = senlinclient/locale
domain = senlinclient

[update_catalog]
domain = senlinclient
output_dir = senlinclient/locale
input_file = senlinclient/locale/senlinclient.pot