Retire Packaging Deb project repos
This commit is part of a series to retire the Packaging Deb project. Step 2 is to remove all content from the project repos, replacing it with a README notification where to find ongoing work, and how to recover the repo if needed at some future point (as in https://docs.openstack.org/infra/manual/drivers.html#retiring-a-project). Change-Id: I74ffe4a77b923f9c0dcbe8a244dab1412e64e3f3
This commit is contained in:
parent
ca1b923f98
commit
8dec06049c
@ -1,3 +0,0 @@
|
||||
[report]
|
||||
include = ironicclient/*
|
||||
omit = ironicclient/tests/functional/*
|
38
.gitignore
vendored
38
.gitignore
vendored
@ -1,38 +0,0 @@
|
||||
# Compiled files
|
||||
*.py[co]
|
||||
*.a
|
||||
*.o
|
||||
*.so
|
||||
|
||||
# Sphinx
|
||||
_build
|
||||
doc/source/api/
|
||||
|
||||
# Release notes
|
||||
releasenotes/build
|
||||
|
||||
# Packages/installer info
|
||||
*.egg
|
||||
*.egg-info
|
||||
dist
|
||||
build
|
||||
eggs
|
||||
parts
|
||||
var
|
||||
sdist
|
||||
develop-eggs
|
||||
.installed.cfg
|
||||
|
||||
# Other
|
||||
*.DS_Store
|
||||
.testrepository
|
||||
.tox
|
||||
.idea
|
||||
.venv
|
||||
.*.swp
|
||||
.coverage
|
||||
cover
|
||||
AUTHORS
|
||||
ChangeLog
|
||||
*.sqlite
|
||||
test.conf
|
@ -1,4 +0,0 @@
|
||||
[gerrit]
|
||||
host=review.openstack.org
|
||||
port=29418
|
||||
project=openstack/python-ironicclient.git
|
@ -1,5 +0,0 @@
|
||||
[DEFAULT]
|
||||
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} ${PYTHON:-python} -m subunit.run discover -t ./ ${TESTS_DIR:-./ironicclient/tests/unit} $LISTOPT $IDOPTION
|
||||
|
||||
test_id_option=--load-list $IDFILE
|
||||
test_list_option=--list
|
@ -1,9 +0,0 @@
|
||||
If you would like to contribute to the development of OpenStack,
|
||||
you must follow the steps documented at:
|
||||
|
||||
http://docs.openstack.org/infra/manual/developers.html
|
||||
|
||||
More information on contributing can be found within the project
|
||||
documentation:
|
||||
|
||||
http://docs.openstack.org/developer/python-ironicclient/contributing.html
|
176
LICENSE
176
LICENSE
@ -1,176 +0,0 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
14
README
Normal file
14
README
Normal file
@ -0,0 +1,14 @@
|
||||
This project is no longer maintained.
|
||||
|
||||
The contents of this repository are still available in the Git
|
||||
source code management system. To see the contents of this
|
||||
repository before it reached its end of life, please check out the
|
||||
previous commit with "git checkout HEAD^1".
|
||||
|
||||
For ongoing work on maintaining OpenStack packages in the Debian
|
||||
distribution, please see the Debian OpenStack packaging team at
|
||||
https://wiki.debian.org/OpenStack/.
|
||||
|
||||
For any further questions, please email
|
||||
openstack-dev@lists.openstack.org or join #openstack-dev on
|
||||
Freenode.
|
113
README.rst
113
README.rst
@ -1,113 +0,0 @@
|
||||
========================
|
||||
Team and repository tags
|
||||
========================
|
||||
|
||||
.. image:: https://governance.openstack.org/tc/badges/python-ironicclient.svg
|
||||
:target: https://governance.openstack.org/tc/reference/tags/index.html
|
||||
|
||||
.. Change things from this point on
|
||||
|
||||
Python bindings for the Ironic API
|
||||
==================================
|
||||
|
||||
This is a client for the OpenStack `Ironic
|
||||
<https://wiki.openstack.org/wiki/Ironic>`_ API. It provides a Python API (the
|
||||
``ironicclient`` module) and a command-line interface (``ironic``).
|
||||
|
||||
Development takes place via the usual OpenStack processes as outlined in the
|
||||
`developer guide <https://docs.openstack.org/infra/manual/developers.html>`_. The master
|
||||
repository is on `git.openstack.org
|
||||
<https://git.openstack.org/cgit/openstack/python-ironicclient>`_.
|
||||
|
||||
``python-ironicclient`` is licensed under the Apache License like the rest
|
||||
of OpenStack.
|
||||
|
||||
|
||||
.. contents:: Contents:
|
||||
:local:
|
||||
|
||||
Python API
|
||||
----------
|
||||
|
||||
Quick-start Example::
|
||||
>>> from ironicclient import client
|
||||
>>>
|
||||
>>> kwargs = {'os_auth_token': '3bcc3d3a03f44e3d8377f9247b0ad155',
|
||||
>>> 'ironic_url': 'http://ironic.example.org:6385/'}
|
||||
>>> ironic = client.get_client(1, **kwargs)
|
||||
|
||||
|
||||
Command-line API
|
||||
----------------
|
||||
|
||||
This package will install the ``ironic`` command line interface that you
|
||||
can use to interact with the ``ironic`` API.
|
||||
|
||||
In order to use the ``ironic`` CLI you'll need to provide your OpenStack
|
||||
tenant, username, password and authentication endpoint. You can do this with
|
||||
the ``--os-tenant-name``, ``--os-username``, ``--os-password`` and
|
||||
``--os-auth-url`` parameters, though it may be easier to set them
|
||||
as environment variables::
|
||||
|
||||
$ export OS_PROJECT_NAME=project
|
||||
$ export OS_USERNAME=user
|
||||
$ export OS_PASSWORD=pass
|
||||
$ export OS_AUTH_URL=http://auth.example.com:5000/v2.0
|
||||
|
||||
To use a specific Ironic API endpoint::
|
||||
|
||||
$ export IRONIC_URL=http://ironic.example.com:6385
|
||||
|
||||
An example of creating a basic node with the pxe_ipmitool driver::
|
||||
|
||||
$ ironic node-create -d pxe_ipmitool
|
||||
|
||||
An example of creating a port on a node::
|
||||
|
||||
$ ironic port-create -a AA:BB:CC:DD:EE:FF -n nodeUUID
|
||||
|
||||
An example of updating driver properties for a node::
|
||||
|
||||
$ ironic node-update nodeUUID add driver_info/ipmi_address=<IPaddress>
|
||||
$ ironic node-update nodeUUID add driver_info/ipmi_username=<username>
|
||||
$ ironic node-update nodeUUID add driver_info/ipmi_password=<password>
|
||||
|
||||
|
||||
For more information about the ``ironic`` command and the subcommands
|
||||
available, run::
|
||||
|
||||
$ ironic help
|
||||
|
||||
OpenStackClient Baremetal Plugin
|
||||
--------------------------------
|
||||
|
||||
In order to use Baremetal Plugin the OpenStackClient should be installed::
|
||||
|
||||
# pip install python-openstackclient
|
||||
|
||||
An example of creating a basic node with the agent_ipmitool driver::
|
||||
|
||||
$ openstack baremetal node create --driver agent_ipmitool
|
||||
|
||||
An example of creating a port on a node::
|
||||
|
||||
$ openstack baremetal port create --node <UUID> AA:BB:CC:DD:EE:FF
|
||||
|
||||
An example of updating driver properties for a node::
|
||||
|
||||
$ openstack baremetal node set --driver-info ipmi_address=<IPaddress> <UUID or name>
|
||||
|
||||
For more information about the ``openstack baremetal`` command and
|
||||
the subcommands available, run::
|
||||
|
||||
$ openstack help baremetal
|
||||
|
||||
* License: Apache License, Version 2.0
|
||||
* Documentation: https://docs.openstack.org/python-ironicclient/latest/
|
||||
* Source: https://git.openstack.org/cgit/openstack/python-ironicclient
|
||||
* Bugs: https://bugs.launchpad.net/python-ironicclient
|
||||
|
||||
Change logs with information about specific versions (or tags) are
|
||||
available at:
|
||||
|
||||
`<https://git.openstack.org/cgit/openstack/python-ironicclient/>`_.
|
@ -1,103 +0,0 @@
|
||||
.. _api_v1:
|
||||
|
||||
=======================
|
||||
ironicclient Python API
|
||||
=======================
|
||||
|
||||
The ironicclient python API lets you access ironic, the OpenStack
|
||||
Bare Metal Provisioning Service.
|
||||
|
||||
For example, to manipulate nodes, you interact with an
|
||||
`ironicclient.v1.node`_ object.
|
||||
You obtain access to nodes via attributes of the
|
||||
`ironicclient.v1.client.Client`_ object.
|
||||
|
||||
Usage
|
||||
=====
|
||||
|
||||
Get a Client object
|
||||
-------------------
|
||||
First, create an `ironicclient.v1.client.Client`_ instance by passing your
|
||||
credentials to `ironicclient.client.get_client()`_. By default, the
|
||||
Bare Metal Provisioning system is configured so that only administrators
|
||||
(users with 'admin' role) have access.
|
||||
|
||||
.. note::
|
||||
Explicit instantiation of `ironicclient.v1.client.Client`_ may cause
|
||||
errors since it doesn't verify provided arguments, using
|
||||
`ironicclient.client.get_client()` is preferred way to get client object.
|
||||
|
||||
There are two different sets of credentials that can be used::
|
||||
|
||||
* ironic endpoint and auth token
|
||||
* Identity Service (keystone) credentials
|
||||
|
||||
Using ironic endpoint and auth token
|
||||
....................................
|
||||
|
||||
An auth token and the ironic endpoint can be used to authenticate::
|
||||
|
||||
* os_auth_token: authentication token (from Identity Service)
|
||||
* ironic_url: ironic API endpoint, eg http://ironic.example.org:6385/v1
|
||||
|
||||
To create the client, you can use the API like so::
|
||||
|
||||
>>> from ironicclient import client
|
||||
>>>
|
||||
>>> kwargs = {'os_auth_token': '3bcc3d3a03f44e3d8377f9247b0ad155',
|
||||
>>> 'ironic_url': 'http://ironic.example.org:6385/'}
|
||||
>>> ironic = client.get_client(1, **kwargs)
|
||||
|
||||
Using Identity Service (keystone) credentials
|
||||
.............................................
|
||||
|
||||
These Identity Service credentials can be used to authenticate::
|
||||
|
||||
* os_username: name of user
|
||||
* os_password: user's password
|
||||
* os_auth_url: Identity Service endpoint for authorization
|
||||
* insecure: Boolean. If True, does not perform X.509 certificate
|
||||
validation when establishing SSL connection with identity
|
||||
service. default: False (optional)
|
||||
* os_tenant_{name|id}: name or ID of tenant
|
||||
|
||||
To create a client, you can use the API like so::
|
||||
|
||||
>>> from ironicclient import client
|
||||
>>>
|
||||
>>> kwargs = {'os_username': 'name',
|
||||
>>> 'os_password': 'password',
|
||||
>>> 'os_auth_url': 'http://keystone.example.org:5000/',
|
||||
>>> 'os_project_name': 'project'}
|
||||
>>> ironic = client.get_client(1, **kwargs)
|
||||
|
||||
Perform ironic operations
|
||||
-------------------------
|
||||
|
||||
Once you have an ironic `Client`_, you can perform various tasks::
|
||||
|
||||
>>> ironic.driver.list() # list of drivers
|
||||
>>> ironic.node.list() # list of nodes
|
||||
>>> ironic.node.get(node_uuid) # information about a particular node
|
||||
|
||||
When the `Client`_ needs to propagate an exception, it will usually
|
||||
raise an instance subclassed from
|
||||
`ironicclient.exc.BaseException`_ or `ironicclient.exc.ClientException`_.
|
||||
|
||||
Refer to the modules themselves, for more details.
|
||||
|
||||
ironicclient Modules
|
||||
====================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
modules <api/autoindex>
|
||||
|
||||
|
||||
.. _ironicclient.v1.node: api/ironicclient.v1.node.html#ironicclient.v1.node.Node
|
||||
.. _ironicclient.v1.client.Client: api/ironicclient.v1.client.html#ironicclient.v1.client.Client
|
||||
.. _Client: api/ironicclient.v1.client.html#ironicclient.v1.client.Client
|
||||
.. _ironicclient.client.get_client(): api/ironicclient.client.html#ironicclient.client.get_client
|
||||
.. _ironicclient.exc.BaseException: api/ironicclient.exc.html#ironicclient.exc.BaseException
|
||||
.. _ironicclient.exc.ClientException: api/ironicclient.exc.html#ironicclient.exc.ClientException
|
@ -1,8 +0,0 @@
|
||||
======================================
|
||||
python-ironicclient User Documentation
|
||||
======================================
|
||||
|
||||
.. toctree::
|
||||
|
||||
ironic_client
|
||||
osc_plugin_cli
|
@ -1,94 +0,0 @@
|
||||
==========================================
|
||||
Ironic Client Command-Line Interface (CLI)
|
||||
==========================================
|
||||
|
||||
.. program:: ironic
|
||||
.. highlight:: bash
|
||||
|
||||
SYNOPSIS
|
||||
========
|
||||
|
||||
:program:`ironic` [options] <command> [command-options]
|
||||
|
||||
:program:`ironic help`
|
||||
|
||||
:program:`ironic help` <command>
|
||||
|
||||
|
||||
DESCRIPTION
|
||||
===========
|
||||
|
||||
The :program:`ironic` command-line interface (CLI) interacts with the
|
||||
OpenStack Bare Metal Service (Ironic).
|
||||
|
||||
In order to use the CLI, you must provide your OpenStack username, password,
|
||||
project (historically called tenant), and auth endpoint. You can use
|
||||
configuration options :option:`--os-username`, :option:`--os-password`,
|
||||
:option:`--os-tenant-id` (or :option:`--os-tenant-name`),
|
||||
and :option:`--os-auth-url`, or set the corresponding
|
||||
environment variables::
|
||||
|
||||
$ export OS_USERNAME=user
|
||||
$ export OS_PASSWORD=password
|
||||
$ export OS_PROJECT_ID=b363706f891f48019483f8bd6503c54b # or OS_PROJECT_NAME
|
||||
$ export OS_PROJECT_NAME=project # or OS_PROJECT_ID
|
||||
$ export OS_AUTH_URL=http://auth.example.com:5000/v2.0
|
||||
|
||||
The command-line tool will attempt to reauthenticate using the provided
|
||||
credentials for every request. You can override this behavior by manually
|
||||
supplying an auth token using :option:`--ironic-url` and
|
||||
:option:`--os-auth-token`, or by setting the corresponding environment
|
||||
variables::
|
||||
|
||||
$ export IRONIC_URL=http://ironic.example.org:6385/
|
||||
$ export OS_AUTH_TOKEN=3bcc3d3a03f44e3d8377f9247b0ad155
|
||||
|
||||
Since Keystone can return multiple regions in the Service Catalog, you can
|
||||
specify the one you want with :option:`--os-region-name` or set the following
|
||||
environment variable. (It defaults to the first in the list returned.)
|
||||
::
|
||||
|
||||
export OS_REGION_NAME=region
|
||||
|
||||
Ironic CLI supports bash completion. The command-line tool can automatically
|
||||
fill partially typed commands. To use this feature, source the below file
|
||||
(available at
|
||||
https://git.openstack.org/cgit/openstack/python-ironicclient/tree/tools/ironic.bash_completion)
|
||||
to your terminal and then bash completion should work::
|
||||
|
||||
$ source ironic.bash_completion
|
||||
|
||||
To avoid doing this every time, add this to your ``.bashrc`` or copy the
|
||||
ironic.bash_completion file to the default bash completion scripts directory
|
||||
on your linux distribution.
|
||||
|
||||
OPTIONS
|
||||
=======
|
||||
|
||||
To get a list of available (sub)commands and options, run::
|
||||
|
||||
$ ironic help
|
||||
|
||||
To get usage and options of a command, run::
|
||||
|
||||
$ ironic help <command>
|
||||
|
||||
|
||||
EXAMPLES
|
||||
========
|
||||
|
||||
Get information about the node-create command::
|
||||
|
||||
$ ironic help node-create
|
||||
|
||||
Get a list of available drivers::
|
||||
|
||||
$ ironic driver-list
|
||||
|
||||
Enroll a node with "fake" deploy driver and "ipmitool" power driver::
|
||||
|
||||
$ ironic node-create -d fake_ipmitool -i ipmi_address=1.2.3.4
|
||||
|
||||
Get a list of nodes::
|
||||
|
||||
$ ironic node-list
|
@ -1,85 +0,0 @@
|
||||
=============================================
|
||||
OpenStack Client Command-Line Interface (CLI)
|
||||
=============================================
|
||||
|
||||
.. program:: openstack baremetal
|
||||
.. highlight:: bash
|
||||
|
||||
Synopsis
|
||||
========
|
||||
|
||||
:program:`openstack [options] baremetal` <command> [command-options]
|
||||
|
||||
:program:`openstack help baremetal` <command>
|
||||
|
||||
|
||||
Description
|
||||
===========
|
||||
|
||||
The OpenStack Client plugin interacts with the Bare Metal service
|
||||
through the ``openstack baremetal`` command line interface (CLI).
|
||||
|
||||
To use ``openstack`` CLI, the OpenStackClient should be installed::
|
||||
|
||||
# pip install python-openstackclient
|
||||
|
||||
To use the CLI, you must provide your OpenStack username, password,
|
||||
project, and auth endpoint. You can use configuration options
|
||||
:option:`--os-username`, :option:`--os-password`, :option:`--os-project-id`
|
||||
(or :option:`--os-project-name`), and :option:`--os-auth-url`,
|
||||
or set the corresponding environment variables::
|
||||
|
||||
$ export OS_USERNAME=user
|
||||
$ export OS_PASSWORD=password
|
||||
$ export OS_PROJECT_NAME=project # or OS_PROJECT_ID
|
||||
$ export OS_PROJECT_DOMAIN_ID=default
|
||||
$ export OS_USER_DOMAIN_ID=default
|
||||
$ export OS_IDENTITY_API_VERSION=3
|
||||
$ export OS_AUTH_URL=http://auth.example.com:5000/identity
|
||||
|
||||
This CLI is provided by python-openstackclient and osc-lib projects:
|
||||
|
||||
* https://git.openstack.org/openstack/python-openstackclient
|
||||
* https://git.openstack.org/openstack/osc-lib
|
||||
|
||||
|
||||
Getting help
|
||||
============
|
||||
|
||||
To get a list of available (sub)commands and options, run::
|
||||
|
||||
$ openstack help baremetal
|
||||
|
||||
To get usage and options of a command, run::
|
||||
|
||||
$ openstack help baremetal <sub-command>
|
||||
|
||||
|
||||
Examples
|
||||
========
|
||||
|
||||
Get information about the openstack baremetal node create command::
|
||||
|
||||
$ openstack help baremetal node create
|
||||
|
||||
Get a list of available drivers::
|
||||
|
||||
$ openstack baremetal driver list
|
||||
|
||||
Enroll a node with "agent_ipmitool" driver::
|
||||
|
||||
$ openstack baremetal node create --driver agent_ipmitool --driver-info ipmi_address=1.2.3.4
|
||||
|
||||
Get a list of nodes::
|
||||
|
||||
$ openstack baremetal node list
|
||||
|
||||
The baremetal API version can be specified via:
|
||||
|
||||
* environment variable OS_BAREMETAL_API_VERSION::
|
||||
|
||||
$ export OS_BAREMETAL_API_VERSION=1.25
|
||||
|
||||
* or optional command line argument --os-baremetal-api-version::
|
||||
|
||||
$ openstack baremetal port group list --os-baremetal-api-version 1.25
|
@ -1,76 +0,0 @@
|
||||
# -- General configuration ----------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = ['sphinx.ext.autodoc',
|
||||
'sphinx.ext.viewcode',
|
||||
'openstackdocstheme',
|
||||
]
|
||||
|
||||
# openstackdocstheme options
|
||||
repository_name = 'openstack/python-ironicclient'
|
||||
bug_project = 'python-ironicclient'
|
||||
bug_tag = ''
|
||||
|
||||
# autodoc generation is a bit aggressive and a nuisance when doing heavy
|
||||
# text edit cycles.
|
||||
# execute "export SPHINX_DEBUG=1" in your terminal to disable
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'python-ironicclient'
|
||||
copyright = u'OpenStack Foundation'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
modindex_common_prefix = ['ironicclient.']
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
add_module_names = True
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of glob-style patterns that should be excluded when looking for
|
||||
# source files. They are matched against the source file names relative to the
|
||||
# source directory, using slashes as directory separators on all platforms.
|
||||
exclude_patterns = ['api/ironicclient.tests.functional.*']
|
||||
|
||||
# -- Options for HTML output --------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
||||
# Sphinx are currently 'default' and 'sphinxdoc'.
|
||||
#html_theme_path = ["."]
|
||||
#html_theme = '_theme'
|
||||
#html_static_path = ['_static']
|
||||
html_theme = 'openstackdocs'
|
||||
|
||||
html_last_updated_fmt = '%Y-%m-%d %H:%M'
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = '%sdoc' % project
|
||||
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass
|
||||
# [howto/manual]).
|
||||
latex_documents = [
|
||||
(
|
||||
'index',
|
||||
'%s.tex' % project,
|
||||
u'%s Documentation' % project,
|
||||
u'OpenStack LLC',
|
||||
'manual'
|
||||
),
|
||||
]
|
@ -1,62 +0,0 @@
|
||||
.. _contributing:
|
||||
|
||||
===================================
|
||||
Contributing to python-ironicclient
|
||||
===================================
|
||||
|
||||
If you're interested in contributing to the python-ironicclient project,
|
||||
the following will help get you started.
|
||||
|
||||
#openstack-ironic on Freenode IRC Network
|
||||
-----------------------------------------
|
||||
There is a very active chat channel at irc://freenode.net/#openstack-ironic.
|
||||
This is usually the best place to ask questions and find your way around.
|
||||
IRC stands for Internet Relay Chat and it is a way to chat online in real
|
||||
time. You can ask a question and come back later to read the answer in the
|
||||
log files. Logs for the #openstack-ironic IRC channel are stored at
|
||||
http://eavesdrop.openstack.org/irclogs/%23openstack-ironic/.
|
||||
|
||||
Contributor License Agreement
|
||||
-----------------------------
|
||||
|
||||
.. index::
|
||||
single: license; agreement
|
||||
|
||||
In order to contribute to the python-ironicclient project, you need to have
|
||||
signed OpenStack's contributor's agreement.
|
||||
|
||||
.. seealso::
|
||||
|
||||
* https://docs.openstack.org/infra/manual/developers.html
|
||||
* https://wiki.openstack.org/wiki/CLA
|
||||
|
||||
LaunchPad Project
|
||||
-----------------
|
||||
|
||||
Most of the tools used for OpenStack depend on a launchpad.net ID for
|
||||
authentication. After signing up for a launchpad account, join the
|
||||
"openstack" team to have access to the mailing list and receive
|
||||
notifications of important events.
|
||||
|
||||
.. seealso::
|
||||
|
||||
* https://launchpad.net
|
||||
* https://launchpad.net/python-ironicclient
|
||||
* https://launchpad.net/~openstack
|
||||
|
||||
|
||||
Project Hosting Details
|
||||
-----------------------
|
||||
|
||||
Bug tracker
|
||||
https://launchpad.net/python-ironicclient
|
||||
|
||||
Mailing list (prefix subjects with ``[ironic]`` for faster responses)
|
||||
http://lists.openstack.org/cgi-bin/mailman/listinfo/openstack-dev
|
||||
|
||||
Code Hosting
|
||||
https://git.openstack.org/cgit/openstack/python-ironicclient
|
||||
|
||||
Code Review
|
||||
https://review.openstack.org/#/q/status:open+project:openstack/python-ironicclient,n,z
|
||||
|
@ -1,8 +0,0 @@
|
||||
=============================================
|
||||
python-ironicclient Contributor Documentation
|
||||
=============================================
|
||||
|
||||
.. toctree::
|
||||
|
||||
contributing
|
||||
testing
|
@ -1,67 +0,0 @@
|
||||
.. _testing:
|
||||
|
||||
=======
|
||||
Testing
|
||||
=======
|
||||
|
||||
Python Guideline Enforcement
|
||||
............................
|
||||
|
||||
All code has to pass the pep8 style guideline to merge into OpenStack, to
|
||||
validate the code against these guidelines you can run::
|
||||
|
||||
$ tox -e pep8
|
||||
|
||||
Unit Testing
|
||||
............
|
||||
|
||||
It is strongly encouraged to run the unit tests locally under one or more
|
||||
test environments prior to submitting a patch. To run all the recommended
|
||||
environments sequentially and pep8 style guideline run::
|
||||
|
||||
$ tox
|
||||
|
||||
You can also selectively pick specific test environments by listing your
|
||||
chosen environments after a -e flag::
|
||||
|
||||
$ tox -e py35,py27,pep8,pypy
|
||||
|
||||
.. note::
|
||||
Tox sets up a virtual environment and installs all necessary dependencies.
|
||||
Sharing the environment with devstack testing is not recommended due to
|
||||
conflicting configuration with system dependencies.
|
||||
|
||||
Functional Testing
|
||||
..................
|
||||
|
||||
Functional testing assumes the existence of the script run_functional.sh in the
|
||||
python-ironicclient/tools directory. The script run_functional.sh generates
|
||||
test.conf file. To run functional tests just run ./run_functional.sh.
|
||||
|
||||
Also, the test.conf file could be created manually or generated from
|
||||
environment variables. It assumes the existence of an openstack
|
||||
cloud installation along with admin credentials. The test.conf file lives in
|
||||
ironicclient/tests/functional/ directory. To run functional tests in that way
|
||||
create test.conf manually and run::
|
||||
|
||||
$ tox -e functional
|
||||
|
||||
An example test.conf file::
|
||||
|
||||
[functional]
|
||||
api_version = 1
|
||||
os_auth_url=http://192.168.0.2:5000/v2.0/
|
||||
os_username=admin
|
||||
os_password=admin
|
||||
os_project_name=admin
|
||||
|
||||
If you are testing ironic in standalone mode, only the parameters
|
||||
'auth_strategy', 'os_auth_token' and 'ironic_url' are required;
|
||||
all others will be ignored.
|
||||
|
||||
An example test.conf file for standalone host::
|
||||
|
||||
[functional]
|
||||
auth_strategy = noauth
|
||||
os_auth_token = fake
|
||||
ironic_url = http://10.0.0.2:6385
|
@ -1,29 +0,0 @@
|
||||
===========================================
|
||||
Python Bindings to the OpenStack Ironic API
|
||||
===========================================
|
||||
|
||||
This is a client for OpenStack `Ironic`_ API. There's a Python API
|
||||
(the `ironicclient` modules), and a command-line interface (installed as
|
||||
`ironic`).
|
||||
|
||||
Contents
|
||||
========
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
api_v1
|
||||
cli/index
|
||||
user/create_command
|
||||
contributor/index
|
||||
Release Notes <http://docs.openstack.org/releasenotes/python-ironicclient/>
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
||||
|
||||
.. _Ironic: https://wiki.openstack.org/wiki/Ironic
|
@ -1,189 +0,0 @@
|
||||
===================================================
|
||||
Creating the Bare Metal service resources from file
|
||||
===================================================
|
||||
|
||||
It is possible to create a set of resources using their descriptions in JSON
|
||||
or YAML format. It can be done in one of three ways:
|
||||
|
||||
1. Using ironic CLI's ``ironic create`` command::
|
||||
|
||||
$ ironic help create
|
||||
usage: ironic create <file> [<file> ...]
|
||||
|
||||
Create baremetal resources (chassis, nodes, port groups and ports). The
|
||||
resources may be described in one or more JSON or YAML files. If any file
|
||||
cannot be validated, no resources are created. An attempt is made to
|
||||
create all the resources; those that could not be created are skipped
|
||||
(with a corresponding error message).
|
||||
|
||||
Positional arguments:
|
||||
<file> File (.yaml or .json) containing descriptions of the resources
|
||||
to create. Can be specified multiple times.
|
||||
|
||||
2. Using openstackclient plugin command ``openstack baremetal create``::
|
||||
|
||||
$ openstack -h baremetal create
|
||||
usage: openstack [-h] [-f {json,shell,table,value,yaml}] [-c COLUMN]
|
||||
[--max-width <integer>] [--noindent] [--prefix PREFIX]
|
||||
[--chassis-uuid <chassis>] [--driver-info <key=value>]
|
||||
[--property <key=value>] [--extra <key=value>]
|
||||
[--uuid <uuid>] [--name <name>]
|
||||
[--network-interface <network_interface>]
|
||||
[--resource-class <resource_class>] [--driver <driver>]
|
||||
[<file> [<file> ...]]
|
||||
|
||||
Create resources from files or Register a new node (DEPRECATED). Create
|
||||
resources from files (by only specifying the files) or register a new
|
||||
node by specifying one or more optional arguments (DEPRECATED, use
|
||||
'openstack baremetal node create' instead).
|
||||
|
||||
positional arguments:
|
||||
<file> File (.yaml or .json) containing descriptions of
|
||||
the resources to create. Can be specified
|
||||
multiple times. If you want to create resources,
|
||||
only specify the files. Do not specify any of
|
||||
the optional arguments.
|
||||
|
||||
.. note::
|
||||
If the ``--driver`` argument is passed in, the behaviour of the command
|
||||
is the same as ``openstack baremetal node create``, and positional
|
||||
arguments are ignored. If it is not provided, the command does resource
|
||||
creation from file(s), and only positional arguments will be taken into
|
||||
account.
|
||||
|
||||
3. Programmatically using the Python API:
|
||||
|
||||
.. autofunction:: ironicclient.v1.create_resources.create_resources
|
||||
:noindex:
|
||||
|
||||
File containing Resource Descriptions
|
||||
=====================================
|
||||
|
||||
The resources to be created can be described either in JSON or YAML. A file
|
||||
ending with ``.json`` is assumed to contain valid JSON, and a file ending with
|
||||
``.yaml`` is assumed to contain valid YAML. Specifying a file with any other
|
||||
extension leads to an error.
|
||||
|
||||
The resources that can be created are chassis, nodes, port groups and ports.
|
||||
A chassis can contain nodes (and resources of nodes) definitions nested under
|
||||
``"nodes"`` key. A node can contain port groups definitions nested under
|
||||
``"portgroups"``, and ports definitions under ``"ports"`` keys. Ports can be
|
||||
also nested under port groups in ``"ports"`` key.
|
||||
|
||||
The schema used to validate the supplied data is the following::
|
||||
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"description": "Schema for ironic resources file",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"chassis": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"nodes": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
|
||||
More detailed description of the creation process can be seen in the following
|
||||
sections.
|
||||
|
||||
Examples
|
||||
========
|
||||
|
||||
Here is an example of the JSON file that can be passed to the ``create``
|
||||
command::
|
||||
|
||||
{
|
||||
"chassis": [
|
||||
{
|
||||
"description": "chassis 3 in row 23",
|
||||
"nodes": [
|
||||
{
|
||||
"name": "node-3",
|
||||
"driver": "agent_ipmitool",
|
||||
"portgroups": [
|
||||
{
|
||||
"name": "switch.cz7882.ports.1-2",
|
||||
"ports": [
|
||||
{
|
||||
"address": "ff:00:00:00:00:00"
|
||||
},
|
||||
{
|
||||
"address": "ff:00:00:00:00:01"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"ports": [
|
||||
{
|
||||
"address": "00:00:00:00:00:02"
|
||||
},
|
||||
{
|
||||
"address": "00:00:00:00:00:03"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "node-4",
|
||||
"driver": "agent_ipmitool",
|
||||
"ports": [
|
||||
{
|
||||
"address": "00:00:00:00:00:04"
|
||||
},
|
||||
{
|
||||
"address": "00:00:00:00:00:01"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"nodes": [
|
||||
{
|
||||
"name": "node-5",
|
||||
"driver": "pxe_ipmitool",
|
||||
"chassis_uuid": "74d93e6e-7384-4994-a614-fd7b399b0785",
|
||||
"ports": [
|
||||
{
|
||||
"address": "00:00:00:00:00:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "node-6",
|
||||
"driver": "pxe_ipmitool"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Creation Process
|
||||
================
|
||||
|
||||
#. The client deserializes the files' contents and validates that the top-level
|
||||
dictionary in each of them contains only "chassis" and/or "nodes" keys,
|
||||
and their values are lists. The creation process is aborted if any failure
|
||||
is encountered in this stage. The rest of the validation is done by the
|
||||
ironic-api service.
|
||||
|
||||
#. Each resource is created via issuing a POST request (with the resource's
|
||||
dictionary representation in the body) to the ironic-api service. In the
|
||||
case of nested resources (``"nodes"`` key inside chassis, ``"portgroups"``
|
||||
key inside nodes, ``"ports"`` key inside nodes or portgroups), the top-level
|
||||
resource is created first, followed by the sub-resources. For example, if a
|
||||
chassis contains a list of nodes, the chassis will be created first followed
|
||||
by the creation of each node. The same is true for ports and port groups
|
||||
described within nodes.
|
||||
|
||||
#. If a resource could not be created, it does not stop the entire process.
|
||||
Any sub-resources of the failed resource will not be created, but otherwise,
|
||||
the rest of the resources will be created if possible. Any failed resources
|
||||
will be mentioned in the response.
|
@ -1,28 +0,0 @@
|
||||
# Copyright 2013 Hewlett-Packard Development Company, L.P.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# Package entry point for python-ironicclient: re-exports the client
# factory module and the exceptions module, and exposes the installed
# package version.

import pbr.version

from ironicclient import client
from ironicclient import exc as exceptions


# The version string is derived at runtime from package metadata that pbr
# generates at build time (e.g. from git tags); it is not hard-coded here.
__version__ = pbr.version.VersionInfo('python-ironicclient').version_string()

# Public API of the package. NOTE(review): both 'exc' and its local alias
# 'exceptions' are exported — presumably kept for backwards compatibility
# with callers using either name.
__all__ = (
    'client',
    'exc',
    'exceptions',
)
|
@ -1,153 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from keystoneauth1 import loading as kaloading
|
||||
from oslo_utils import importutils
|
||||
|
||||
from ironicclient.common.i18n import _
|
||||
from ironicclient import exc
|
||||
|
||||
|
||||
def get_client(api_version, os_auth_token=None, ironic_url=None,
               os_username=None, os_password=None, os_auth_url=None,
               os_project_id=None, os_project_name=None, os_tenant_id=None,
               os_tenant_name=None, os_region_name=None,
               os_user_domain_id=None, os_user_domain_name=None,
               os_project_domain_id=None, os_project_domain_name=None,
               os_service_type=None, os_endpoint_type=None,
               insecure=None, timeout=None, os_cacert=None, ca_file=None,
               os_cert=None, cert_file=None, os_key=None, key_file=None,
               os_ironic_api_version=None, max_retries=None,
               retry_interval=None, session=None, **ignored_kwargs):
    """Get an authenticated client, based on the credentials.

    Three ways of authenticating are supported, tried in this order:
    a pre-existing token plus an explicit ironic endpoint (standalone
    mode), Keystone username/password credentials, or a Keystone token —
    the latter two via a keystoneauth session (either passed in or built
    here).

    :param api_version: the API version to use. Valid value: '1'.
    :param os_auth_token: pre-existing token to re-use
    :param ironic_url: ironic API endpoint
    :param os_username: name of a user
    :param os_password: user's password
    :param os_auth_url: endpoint to authenticate against
    :param os_tenant_name: name of a tenant (deprecated in favour of
        os_project_name)
    :param os_tenant_id: ID of a tenant (deprecated in favour of
        os_project_id)
    :param os_project_name: name of a project
    :param os_project_id: ID of a project
    :param os_region_name: name of a keystone region
    :param os_user_domain_name: name of a domain the user belongs to
    :param os_user_domain_id: ID of a domain the user belongs to
    :param os_project_domain_name: name of a domain the project belongs to
    :param os_project_domain_id: ID of a domain the project belongs to
    :param os_service_type: the type of service to lookup the endpoint for
    :param os_endpoint_type: the type (exposure) of the endpoint
    :param insecure: allow insecure SSL (no cert verification)
    :param timeout: allows customization of the timeout for client HTTP
        requests
    :param os_cacert: path to cacert file
    :param ca_file: path to cacert file, deprecated in favour of os_cacert
    :param os_cert: path to cert file
    :param cert_file: path to cert file, deprecated in favour of os_cert
    :param os_key: path to key file
    :param key_file: path to key file, deprecated in favour of os_key
    :param os_ironic_api_version: ironic API version to use
    :param max_retries: Maximum number of retries in case of conflict error
    :param retry_interval: Amount of time (in seconds) between retries in case
        of conflict error
    :param session: Keystone session to use
    :param ignored_kwargs: all the other params that are passed. Left for
        backwards compatibility. They are ignored.
    :raises exc.AmbiguousAuthSystem: when no endpoint can be determined
        from the supplied credentials
    """
    os_service_type = os_service_type or 'baremetal'
    os_endpoint_type = os_endpoint_type or 'publicURL'
    # New-style 'project' names take precedence over deprecated 'tenant'.
    project_id = (os_project_id or os_tenant_id)
    project_name = (os_project_name or os_tenant_name)
    kwargs = {
        'os_ironic_api_version': os_ironic_api_version,
        'max_retries': max_retries,
        'retry_interval': retry_interval,
    }
    endpoint = ironic_url
    # Non-deprecated TLS options win over their deprecated *_file twins.
    cacert = os_cacert or ca_file
    cert = os_cert or cert_file
    key = os_key or key_file
    if os_auth_token and endpoint:
        # Standalone mode: caller supplied both token and endpoint, so no
        # Keystone round-trip is needed; pass everything straight through.
        kwargs.update({
            'token': os_auth_token,
            'insecure': insecure,
            'ca_file': cacert,
            'cert_file': cert,
            'key_file': key,
            'timeout': timeout,
        })
    elif os_auth_url:
        auth_type = 'password'
        auth_kwargs = {
            'auth_url': os_auth_url,
            'project_id': project_id,
            'project_name': project_name,
            'user_domain_id': os_user_domain_id,
            'user_domain_name': os_user_domain_name,
            'project_domain_id': os_project_domain_id,
            'project_domain_name': os_project_domain_name,
        }
        if os_username and os_password:
            auth_kwargs.update({
                'username': os_username,
                'password': os_password,
            })
        elif os_auth_token:
            # Token auth against Keystone (as opposed to standalone above).
            auth_type = 'token'
            auth_kwargs.update({
                'token': os_auth_token,
            })
        # Create new session only if it was not passed in
        if not session:
            loader = kaloading.get_plugin_loader(auth_type)
            auth_plugin = loader.load_from_options(**auth_kwargs)
            # Let keystoneauth do the necessary parameter conversions
            session = kaloading.session.Session().load_from_options(
                auth=auth_plugin, insecure=insecure, cacert=cacert,
                cert=cert, key=key, timeout=timeout,
            )

    exception_msg = _('Must provide Keystone credentials or user-defined '
                      'endpoint and token')
    if not endpoint:
        if session:
            try:
                # Pass the endpoint, it will be used to get hostname
                # and port that will be used for API version caching. It will
                # be also set as endpoint_override.
                endpoint = session.get_endpoint(
                    service_type=os_service_type,
                    interface=os_endpoint_type,
                    region_name=os_region_name
                )
            except Exception as e:
                # Catch-all: keystoneauth can raise many exception types
                # here; wrap whatever happened into the client's own error.
                raise exc.AmbiguousAuthSystem(
                    _('%(message)s, error was: %(error)s') %
                    {'message': exception_msg, 'error': e})
        else:
            # Neither session, nor valid auth parameters provided
            raise exc.AmbiguousAuthSystem(exception_msg)

    # Always pass the session
    kwargs['session'] = session

    return Client(api_version, endpoint, **kwargs)
|
||||
|
||||
|
||||
def Client(version, *args, **kwargs):
    """Factory returning a versioned ironic client instance.

    Dynamically imports ``ironicclient.v<version>.client`` and
    instantiates its ``Client`` class with the remaining arguments.

    :param version: the API version to use, e.g. '1'
    :param args: positional arguments forwarded to the versioned client
        constructor (typically the endpoint)
    :param kwargs: keyword arguments forwarded to the versioned client
        constructor
    :returns: an instance of the versioned ``Client`` class
    """
    module = importutils.import_versioned_module('ironicclient',
                                                 version, 'client')
    # Direct attribute access instead of getattr() with a constant name
    # (flake8-bugbear B009): same behavior, clearer intent.
    return module.Client(*args, **kwargs)
|
@ -1,517 +0,0 @@
|
||||
# Copyright 2010 Jacob Kaplan-Moss
|
||||
# Copyright 2011 OpenStack Foundation
|
||||
# Copyright 2012 Grid Dynamics
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Base utilities to build API operation managers and objects on top of.
|
||||
"""
|
||||
|
||||
|
||||
# E1102: %s is not callable
|
||||
# pylint: disable=E1102
|
||||
|
||||
import abc
|
||||
import copy
|
||||
|
||||
from oslo_utils import strutils
|
||||
import six
|
||||
from six.moves import http_client
|
||||
from six.moves.urllib import parse
|
||||
|
||||
from ironicclient.common.apiclient import exceptions
|
||||
from ironicclient.common.i18n import _
|
||||
|
||||
|
||||
def getid(obj):
    """Return the identifier of *obj*, or *obj* itself.

    Abstracts the common pattern of allowing both an object or an
    object's ID (UUID) as a parameter when dealing with relationships.
    A truthy ``uuid`` attribute is preferred, then an ``id`` attribute;
    anything without either (e.g. a bare string ID) is returned as-is.
    """
    uuid = getattr(obj, 'uuid', None)
    if uuid:
        return uuid
    # Fall back to 'id'; if that is absent too, obj is already an ID.
    return getattr(obj, 'id', obj)
|
||||
|
||||
|
||||
# TODO(aababilov): call run_hooks() in HookableMixin's child classes
|
||||
class HookableMixin(object):
    """Mixin so classes can register and run hooks."""

    # Class-level registry mapping hook type name -> list of callables.
    # Shared by all classes using the mixin (it lives on the class, not
    # on instances).
    _hooks_map = {}

    @classmethod
    def add_hook(cls, hook_type, hook_func):
        """Register a new hook of the specified type.

        :param cls: class that registers hooks
        :param hook_type: hook type, e.g., '__pre_parse_args__'
        :param hook_func: hook function
        """
        cls._hooks_map.setdefault(hook_type, []).append(hook_func)

    @classmethod
    def run_hooks(cls, hook_type, *args, **kwargs):
        """Invoke every hook registered under the specified type.

        Unknown hook types are a silent no-op.

        :param cls: class that registers hooks
        :param hook_type: hook type, e.g., '__pre_parse_args__'
        :param args: args to be passed to every hook function
        :param kwargs: kwargs to be passed to every hook function
        """
        for registered in cls._hooks_map.get(hook_type, []):
            registered(*args, **kwargs)
|
||||
|
||||
|
||||
class BaseManager(HookableMixin):
    """Basic manager type providing common operations.

    Managers interact with a particular type of API (servers, flavors,
    images, etc.) and provide CRUD operations for them.
    """

    # Subclasses set this to the Resource subclass they manage.
    resource_class = None

    def __init__(self, client):
        """Initializes BaseManager with `client`.

        :param client: instance of BaseClient descendant for HTTP requests
        """
        super(BaseManager, self).__init__()
        self.client = client

    def _list(self, url, response_key=None, obj_class=None, json=None):
        """List the collection.

        :param url: a partial URL, e.g., '/servers'
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'servers'. If response_key is None - all response body
            will be used.
        :param obj_class: class for constructing the returned objects
            (self.resource_class will be used by default)
        :param json: data that will be encoded as JSON and passed in POST
            request (GET will be sent by default)
        """
        # A request body switches the call from GET to POST.
        response = (self.client.post(url, json=json) if json
                    else self.client.get(url))
        body = response.json()

        cls = self.resource_class if obj_class is None else obj_class

        data = body if response_key is None else body[response_key]
        # NOTE(ja): keystone returns values as list as {'values': [ ... ]}
        # unlike other services which just return the list...
        try:
            data = data['values']
        except (KeyError, TypeError):
            pass

        # Falsy entries (e.g. None) are filtered out.
        return [cls(self, item, loaded=True) for item in data if item]

    def _get(self, url, response_key=None):
        """Get an object from collection.

        :param url: a partial URL, e.g., '/servers'
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'server'. If response_key is None - all response body
            will be used.
        """
        body = self.client.get(url).json()
        data = body if response_key is None else body[response_key]
        return self.resource_class(self, data, loaded=True)

    def _head(self, url):
        """Retrieve request headers for an object.

        :param url: a partial URL, e.g., '/servers'
        :returns: True when the server answered 204 No Content
        """
        response = self.client.head(url)
        return response.status_code == http_client.NO_CONTENT

    def _post(self, url, json, response_key=None, return_raw=False):
        """Create an object.

        :param url: a partial URL, e.g., '/servers'
        :param json: data that will be encoded as JSON and passed in POST
            request (GET will be sent by default)
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'server'. If response_key is None - all response body
            will be used.
        :param return_raw: flag to force returning raw JSON instead of
            Python object of self.resource_class
        """
        body = self.client.post(url, json=json).json()
        data = body if response_key is None else body[response_key]
        return data if return_raw else self.resource_class(self, data)

    def _put(self, url, json=None, response_key=None):
        """Update an object with PUT method.

        :param url: a partial URL, e.g., '/servers'
        :param json: data that will be encoded as JSON and passed in the
            PUT request body
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'servers'. If response_key is None - all response body
            will be used.
        :returns: a resource object, or None when the server sent no body
        """
        response = self.client.put(url, json=json)
        # PUT requests may not return a body
        if response.content:
            body = response.json()
            data = body if response_key is None else body[response_key]
            return self.resource_class(self, data)

    def _patch(self, url, json=None, response_key=None):
        """Update an object with PATCH method.

        :param url: a partial URL, e.g., '/servers'
        :param json: data that will be encoded as JSON and passed in the
            PATCH request body
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'servers'. If response_key is None - all response body
            will be used.
        """
        body = self.client.patch(url, json=json).json()
        data = body if response_key is None else body[response_key]
        return self.resource_class(self, data)

    def _delete(self, url):
        """Delete an object.

        :param url: a partial URL, e.g., '/servers/my-server'
        """
        return self.client.delete(url)
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
class ManagerWithFind(BaseManager):
    """Manager with additional `find()`/`findall()` methods."""

    @abc.abstractmethod
    def list(self):
        pass

    def find(self, **kwargs):
        """Find a single item with attributes matching ``**kwargs``.

        This isn't very efficient: it loads the entire list then filters on
        the Python side.

        :raises exceptions.NotFound: when nothing matched
        :raises exceptions.NoUniqueMatch: when more than one item matched
        """
        matches = self.findall(**kwargs)
        # Guard clauses instead of an if/elif/else ladder.
        if not matches:
            msg = _("No %(name)s matching %(args)s.") % {
                'name': self.resource_class.__name__,
                'args': kwargs
            }
            raise exceptions.NotFound(msg)
        if len(matches) > 1:
            raise exceptions.NoUniqueMatch()
        return matches[0]

    def findall(self, **kwargs):
        """Find all items with attributes matching ``**kwargs``.

        This isn't very efficient: it loads the entire list then filters on
        the Python side. Items missing any of the requested attributes are
        skipped rather than treated as errors.
        """
        wanted = kwargs.items()
        results = []

        for candidate in self.list():
            try:
                matched = all(getattr(candidate, name) == expected
                              for name, expected in wanted)
            except AttributeError:
                continue
            if matched:
                results.append(candidate)

        return results
|
||||
|
||||
|
||||
class CrudManager(BaseManager):
|
||||
"""Base manager class for manipulating entities.
|
||||
|
||||
Children of this class are expected to define a `collection_key` and `key`.
|
||||
|
||||
- `collection_key`: Usually a plural noun by convention (e.g. `entities`);
|
||||
used to refer collections in both URL's (e.g. `/v3/entities`) and JSON
|
||||
objects containing a list of member resources (e.g. `{'entities': [{},
|
||||
{}, {}]}`).
|
||||
- `key`: Usually a singular noun by convention (e.g. `entity`); used to
|
||||
refer to an individual member of the collection.
|
||||
|
||||
"""
|
||||
collection_key = None
|
||||
key = None
|
||||
|
||||
def build_url(self, base_url=None, **kwargs):
|
||||
"""Builds a resource URL for the given kwargs.
|
||||
|
||||
Given an example collection where `collection_key = 'entities'` and
|
||||
`key = 'entity'`, the following URL's could be generated.
|
||||
|
||||
By default, the URL will represent a collection of entities, e.g.::
|
||||
|
||||
/entities
|
||||
|
||||
If kwargs contains an `entity_id`, then the URL will represent a
|
||||
specific member, e.g.::
|
||||
|
||||
/entities/{entity_id}
|
||||
|
||||
:param base_url: if provided, the generated URL will be appended to it
|
||||
"""
|
||||
url = base_url if base_url is not None else ''
|
||||
|
||||
url += '/%s' % self.collection_key
|
||||
|
||||
# do we have a specific entity?
|
||||
entity_id = kwargs.get('%s_id' % self.key)
|
||||
if entity_id is not None:
|
||||
url += '/%s' % entity_id
|
||||
|
||||
return url
|
||||
|
||||
def _filter_kwargs(self, kwargs):
|
||||
"""Drop null values and handle ids."""
|
||||
for key, ref in kwargs.copy().items():
|
||||
if ref is None:
|
||||
kwargs.pop(key)
|
||||
else:
|
||||
if isinstance(ref, Resource):
|
||||
kwargs.pop(key)
|
||||
kwargs['%s_id' % key] = getid(ref)
|
||||
return kwargs
|
||||
|
||||
def create(self, **kwargs):
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
return self._post(
|
||||
self.build_url(**kwargs),
|
||||
{self.key: kwargs},
|
||||
self.key)
|
||||
|
||||
def get(self, **kwargs):
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
return self._get(
|
||||
self.build_url(**kwargs),
|
||||
self.key)
|
||||
|
||||
def head(self, **kwargs):
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
return self._head(self.build_url(**kwargs))
|
||||
|
||||
def list(self, base_url=None, **kwargs):
|
||||
"""List the collection.
|
||||
|
||||
:param base_url: if provided, the generated URL will be appended to it
|
||||
"""
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
|
||||
return self._list(
|
||||
'%(base_url)s%(query)s' % {
|
||||
'base_url': self.build_url(base_url=base_url, **kwargs),
|
||||
'query': '?%s' % parse.urlencode(kwargs) if kwargs else '',
|
||||
},
|
||||
self.collection_key)
|
||||
|
||||
def put(self, base_url=None, **kwargs):
|
||||
"""Update an element.
|
||||
|
||||
:param base_url: if provided, the generated URL will be appended to it
|
||||
"""
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
|
||||
return self._put(self.build_url(base_url=base_url, **kwargs))
|
||||
|
||||
def update(self, **kwargs):
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
params = kwargs.copy()
|
||||
params.pop('%s_id' % self.key)
|
||||
|
||||
return self._patch(
|
||||
self.build_url(**kwargs),
|
||||
{self.key: params},
|
||||
self.key)
|
||||
|
||||
def delete(self, **kwargs):
|
||||
kwargs = self._filter_kwargs(kwargs)
|
||||
|
||||
return self._delete(
|
||||
self.build_url(**kwargs))
|
||||
|
||||
def find(self, base_url=None, **kwargs):
    """Find a single item with attributes matching ``**kwargs``.

    :param base_url: if provided, the generated URL will be appended to it
    :raises exceptions.NotFound: if no item matches
    :raises exceptions.NoUniqueMatch: if more than one item matches
    """
    kwargs = self._filter_kwargs(kwargs)

    # The surviving kwargs double as server-side query filters.
    rl = self._list(
        '%(base_url)s%(query)s' % {
            'base_url': self.build_url(base_url=base_url, **kwargs),
            'query': '?%s' % parse.urlencode(kwargs) if kwargs else '',
        },
        self.collection_key)
    num = len(rl)

    if num == 0:
        msg = _("No %(name)s matching %(args)s.") % {
            'name': self.resource_class.__name__,
            'args': kwargs
        }
        raise exceptions.NotFound(msg)
    elif num > 1:
        raise exceptions.NoUniqueMatch
    else:
        return rl[0]
|
||||
|
||||
|
||||
class Extension(HookableMixin):
    """Extension descriptor.

    Wraps a python module implementing an extension: callables named in
    ``SUPPORTED_HOOKS`` are registered as hooks, and the first
    ``BaseManager`` subclass found becomes ``manager_class``.
    """

    SUPPORTED_HOOKS = ('__pre_parse_args__', '__post_parse_args__')
    # Manager subclass exported by the extension module, if any.
    manager_class = None

    def __init__(self, name, module):
        """Record the extension and scan its module.

        :param name: extension name
        :param module: python module implementing the extension
        """
        super(Extension, self).__init__()
        self.name = name
        self.module = module
        self._parse_extension_module()

    def _parse_extension_module(self):
        # Scan the module namespace for hook callables and a manager class.
        self.manager_class = None
        for attr_name, attr_value in self.module.__dict__.items():
            if attr_name in self.SUPPORTED_HOOKS:
                self.add_hook(attr_name, attr_value)
            else:
                try:
                    if issubclass(attr_value, BaseManager):
                        self.manager_class = attr_value
                except TypeError:
                    # issubclass() raises TypeError for non-class values.
                    pass

    def __repr__(self):
        return "<Extension '%s'>" % self.name
|
||||
|
||||
|
||||
class Resource(object):
    """Base class for OpenStack resources (tenant, user, etc.).

    This is pretty much just a bag for attributes.
    """

    # When HUMAN_ID is True, human_id returns a slugified NAME_ATTR
    # value usable for bash completion.
    HUMAN_ID = False
    NAME_ATTR = 'name'

    def __init__(self, manager, info, loaded=False):
        """Populate and bind to a manager.

        :param manager: BaseManager object
        :param info: dictionary representing resource attributes
        :param loaded: prevent lazy-loading if set to True
        """
        self.manager = manager
        self._info = info
        self._add_details(info)
        self._loaded = loaded

    def __repr__(self):
        # Show every public attribute except the manager back-reference.
        reprkeys = sorted(k
                          for k in self.__dict__.keys()
                          if k[0] != '_' and k != 'manager')
        info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys)
        return "<%s %s>" % (self.__class__.__name__, info)

    @property
    def human_id(self):
        """Human-readable ID which can be used for bash completion."""
        if self.HUMAN_ID:
            name = getattr(self, self.NAME_ATTR, None)
            if name is not None:
                return strutils.to_slug(name)
        return None

    def _add_details(self, info):
        # Mirror each value both as an instance attribute and in _info.
        for (k, v) in info.items():
            try:
                setattr(self, k, v)
                self._info[k] = v
            except AttributeError:
                # In this case we already defined the attribute on the class
                pass

    def __getattr__(self, k):
        if k not in self.__dict__:
            # NOTE(bcwaldon): disallow lazy-loading if already loaded once
            if not self.is_loaded():
                # Fetch the details once, then retry the lookup.
                self.get()
                return self.__getattr__(k)

            raise AttributeError(k)
        else:
            return self.__dict__[k]

    def get(self):
        """Support for lazy loading details.

        Some clients, such as novaclient have the option to lazy load the
        details, details which can be loaded with this function.
        """
        # set_loaded() first ... so if we have to bail, we know we tried.
        self.set_loaded(True)
        if not hasattr(self.manager, 'get'):
            return

        new = self.manager.get(self.id)
        if new:
            self._add_details(new._info)
            self._add_details(
                {'x_request_id': self.manager.client.last_request_id})

    def __eq__(self, other):
        if not isinstance(other, Resource):
            return NotImplemented
        # two resources of different types are not equal
        if not isinstance(other, self.__class__):
            return False
        return self._info == other._info

    def is_loaded(self):
        # True once details have been fetched (or supplied up front).
        return self._loaded

    def set_loaded(self, val):
        self._loaded = val

    def to_dict(self):
        # Deep copy so callers cannot mutate the cached attributes.
        return copy.deepcopy(self._info)
|
@ -1,469 +0,0 @@
|
||||
# Copyright 2010 Jacob Kaplan-Moss
|
||||
# Copyright 2011 Nebula, Inc.
|
||||
# Copyright 2013 Alessio Ababilov
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Exception definitions.
|
||||
"""
|
||||
|
||||
|
||||
import inspect
|
||||
import sys
|
||||
|
||||
import six
|
||||
from six.moves import http_client
|
||||
|
||||
from ironicclient.common.i18n import _
|
||||
|
||||
|
||||
# Root of the library's exception hierarchy, followed by the generic
# (non-HTTP) error categories.
class ClientException(Exception):
    """The base exception class for all exceptions this library raises."""
    pass


class ValidationError(ClientException):
    """Error in validation on API client side."""
    pass


class UnsupportedVersion(ClientException):
    """User is trying to use an unsupported version of the API."""
    pass


class CommandError(ClientException):
    """Error in CLI tool."""
    pass


class AuthorizationFailure(ClientException):
    """Cannot authorize API client."""
    pass


class ConnectionError(ClientException):
    """Cannot connect to API service."""
    pass


class ConnectionRefused(ConnectionError):
    """Connection refused while trying to connect to API service."""
    pass
|
||||
|
||||
|
||||
# Authentication / service-catalog error categories.
class AuthPluginOptionsMissing(AuthorizationFailure):
    """Auth plugin misses some options."""
    def __init__(self, opt_names):
        # :param opt_names: iterable of missing option names
        super(AuthPluginOptionsMissing, self).__init__(
            _("Authentication failed. Missing options: %s") %
            ", ".join(opt_names))
        self.opt_names = opt_names


class AuthSystemNotFound(AuthorizationFailure):
    """User has specified an AuthSystem that is not installed."""
    def __init__(self, auth_system):
        # :param auth_system: name of the requested auth system
        super(AuthSystemNotFound, self).__init__(
            _("AuthSystemNotFound: %r") % auth_system)
        self.auth_system = auth_system


class NoUniqueMatch(ClientException):
    """Multiple entities found instead of one."""
    pass


class EndpointException(ClientException):
    """Something is rotten in Service Catalog."""
    pass


class EndpointNotFound(EndpointException):
    """Could not find requested endpoint in Service Catalog."""
    pass


class AmbiguousEndpoints(EndpointException):
    """Found more than one matching endpoint in Service Catalog."""
    def __init__(self, endpoints=None):
        # :param endpoints: the conflicting endpoints that were found
        super(AmbiguousEndpoints, self).__init__(
            _("AmbiguousEndpoints: %r") % endpoints)
        self.endpoints = endpoints
|
||||
|
||||
|
||||
class HttpError(ClientException):
    """The base exception class for all HTTP exceptions."""
    # Subclasses override these with the real status code and message.
    http_status = 0
    message = _("HTTP Error")

    def __init__(self, message=None, details=None,
                 response=None, request_id=None,
                 url=None, method=None, http_status=None):
        """Capture the failing request/response context.

        :param message: short human-readable error message
        :param details: longer error description, if available
        :param response: the raw response object
        :param request_id: the x-openstack/compute request id, if any
        :param url: URL of the failing request
        :param method: HTTP method of the failing request
        :param http_status: numeric status code (falls back to the
            class-level default)
        """
        self.http_status = http_status or self.http_status
        self.message = message or self.message
        self.details = details
        self.request_id = request_id
        self.response = response
        self.url = url
        self.method = method
        formatted_string = "%s (HTTP %s)" % (self.message, self.http_status)
        if request_id:
            formatted_string += " (Request-ID: %s)" % request_id
        super(HttpError, self).__init__(formatted_string)
|
||||
|
||||
|
||||
# Broad HTTP families: 3xx, 4xx and 5xx bases.
class HTTPRedirection(HttpError):
    """HTTP Redirection."""
    message = _("HTTP Redirection")


class HTTPClientError(HttpError):
    """Client-side HTTP error.

    Exception for cases in which the client seems to have erred.
    """
    message = _("HTTP Client Error")


class HttpServerError(HttpError):
    """Server-side HTTP error.

    Exception for cases in which the server is aware that it has
    erred or is incapable of performing the request.
    """
    message = _("HTTP Server Error")
|
||||
|
||||
|
||||
# Concrete status-code exceptions, HTTP 300 and 400-412.  Each one pins
# ``http_status`` so _code_map can resolve it from a response code.
class MultipleChoices(HTTPRedirection):
    """HTTP 300 - Multiple Choices.

    Indicates multiple options for the resource that the client may follow.
    """

    http_status = http_client.MULTIPLE_CHOICES
    message = _("Multiple Choices")


class BadRequest(HTTPClientError):
    """HTTP 400 - Bad Request.

    The request cannot be fulfilled due to bad syntax.
    """
    http_status = http_client.BAD_REQUEST
    message = _("Bad Request")


class Unauthorized(HTTPClientError):
    """HTTP 401 - Unauthorized.

    Similar to 403 Forbidden, but specifically for use when authentication
    is required and has failed or has not yet been provided.
    """
    http_status = http_client.UNAUTHORIZED
    message = _("Unauthorized")


class PaymentRequired(HTTPClientError):
    """HTTP 402 - Payment Required.

    Reserved for future use.
    """
    http_status = http_client.PAYMENT_REQUIRED
    message = _("Payment Required")


class Forbidden(HTTPClientError):
    """HTTP 403 - Forbidden.

    The request was a valid request, but the server is refusing to respond
    to it.
    """
    http_status = http_client.FORBIDDEN
    message = _("Forbidden")


class NotFound(HTTPClientError):
    """HTTP 404 - Not Found.

    The requested resource could not be found but may be available again
    in the future.
    """
    http_status = http_client.NOT_FOUND
    message = _("Not Found")


class MethodNotAllowed(HTTPClientError):
    """HTTP 405 - Method Not Allowed.

    A request was made of a resource using a request method not supported
    by that resource.
    """
    http_status = http_client.METHOD_NOT_ALLOWED
    message = _("Method Not Allowed")


class NotAcceptable(HTTPClientError):
    """HTTP 406 - Not Acceptable.

    The requested resource is only capable of generating content not
    acceptable according to the Accept headers sent in the request.
    """
    http_status = http_client.NOT_ACCEPTABLE
    message = _("Not Acceptable")


class ProxyAuthenticationRequired(HTTPClientError):
    """HTTP 407 - Proxy Authentication Required.

    The client must first authenticate itself with the proxy.
    """
    http_status = http_client.PROXY_AUTHENTICATION_REQUIRED
    message = _("Proxy Authentication Required")


class RequestTimeout(HTTPClientError):
    """HTTP 408 - Request Timeout.

    The server timed out waiting for the request.
    """
    http_status = http_client.REQUEST_TIMEOUT
    message = _("Request Timeout")


class Conflict(HTTPClientError):
    """HTTP 409 - Conflict.

    Indicates that the request could not be processed because of conflict
    in the request, such as an edit conflict.
    """
    http_status = http_client.CONFLICT
    message = _("Conflict")


class Gone(HTTPClientError):
    """HTTP 410 - Gone.

    Indicates that the resource requested is no longer available and will
    not be available again.
    """
    http_status = http_client.GONE
    message = _("Gone")


class LengthRequired(HTTPClientError):
    """HTTP 411 - Length Required.

    The request did not specify the length of its content, which is
    required by the requested resource.
    """
    http_status = http_client.LENGTH_REQUIRED
    message = _("Length Required")


class PreconditionFailed(HTTPClientError):
    """HTTP 412 - Precondition Failed.

    The server does not meet one of the preconditions that the requester
    put on the request.
    """
    http_status = http_client.PRECONDITION_FAILED
    message = _("Precondition Failed")
|
||||
|
||||
|
||||
class RequestEntityTooLarge(HTTPClientError):
    """HTTP 413 - Request Entity Too Large.

    The request is larger than the server is willing or able to process.
    """
    http_status = http_client.REQUEST_ENTITY_TOO_LARGE
    message = _("Request Entity Too Large")

    def __init__(self, *args, **kwargs):
        # The server may supply a Retry-After header; default to 0 when
        # it is absent or not an integer.
        try:
            self.retry_after = int(kwargs.pop('retry_after'))
        except (KeyError, ValueError):
            self.retry_after = 0

        super(RequestEntityTooLarge, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
# Concrete status-code exceptions, HTTP 414-422 and the 5xx codes.
class RequestUriTooLong(HTTPClientError):
    """HTTP 414 - Request-URI Too Long.

    The URI provided was too long for the server to process.
    """
    http_status = http_client.REQUEST_URI_TOO_LONG
    message = _("Request-URI Too Long")


class UnsupportedMediaType(HTTPClientError):
    """HTTP 415 - Unsupported Media Type.

    The request entity has a media type which the server or resource does
    not support.
    """
    http_status = http_client.UNSUPPORTED_MEDIA_TYPE
    message = _("Unsupported Media Type")


class RequestedRangeNotSatisfiable(HTTPClientError):
    """HTTP 416 - Requested Range Not Satisfiable.

    The client has asked for a portion of the file, but the server cannot
    supply that portion.
    """
    http_status = http_client.REQUESTED_RANGE_NOT_SATISFIABLE
    message = _("Requested Range Not Satisfiable")


class ExpectationFailed(HTTPClientError):
    """HTTP 417 - Expectation Failed.

    The server cannot meet the requirements of the Expect request-header field.
    """
    http_status = http_client.EXPECTATION_FAILED
    message = _("Expectation Failed")


class UnprocessableEntity(HTTPClientError):
    """HTTP 422 - Unprocessable Entity.

    The request was well-formed but was unable to be followed due to semantic
    errors.
    """
    http_status = http_client.UNPROCESSABLE_ENTITY
    message = _("Unprocessable Entity")


class InternalServerError(HttpServerError):
    """HTTP 500 - Internal Server Error.

    A generic error message, given when no more specific message is suitable.
    """
    http_status = http_client.INTERNAL_SERVER_ERROR
    message = _("Internal Server Error")


# NotImplemented is a python keyword.
class HttpNotImplemented(HttpServerError):
    """HTTP 501 - Not Implemented.

    The server either does not recognize the request method, or it lacks
    the ability to fulfill the request.
    """
    http_status = http_client.NOT_IMPLEMENTED
    message = _("Not Implemented")


class BadGateway(HttpServerError):
    """HTTP 502 - Bad Gateway.

    The server was acting as a gateway or proxy and received an invalid
    response from the upstream server.
    """
    http_status = http_client.BAD_GATEWAY
    message = _("Bad Gateway")


class ServiceUnavailable(HttpServerError):
    """HTTP 503 - Service Unavailable.

    The server is currently unavailable.
    """
    http_status = http_client.SERVICE_UNAVAILABLE
    message = _("Service Unavailable")


class GatewayTimeout(HttpServerError):
    """HTTP 504 - Gateway Timeout.

    The server was acting as a gateway or proxy and did not receive a timely
    response from the upstream server.
    """
    http_status = http_client.GATEWAY_TIMEOUT
    message = _("Gateway Timeout")


class HttpVersionNotSupported(HttpServerError):
    """HTTP 505 - HttpVersion Not Supported.

    The server does not support the HTTP protocol version used in the request.
    """
    http_status = http_client.HTTP_VERSION_NOT_SUPPORTED
    message = _("HTTP Version Not Supported")
|
||||
|
||||
|
||||
# _code_map contains all the classes that have http_status attribute.
|
||||
_code_map = dict(
|
||||
(getattr(obj, 'http_status', None), obj)
|
||||
for name, obj in vars(sys.modules[__name__]).items()
|
||||
if inspect.isclass(obj) and getattr(obj, 'http_status', False)
|
||||
)
|
||||
|
||||
|
||||
def from_response(response, method, url):
    """Returns an instance of :class:`HttpError` or subclass based on response.

    :param response: instance of `requests.Response` class
    :param method: HTTP method used for request
    :param url: URL used for request
    """

    req_id = response.headers.get("x-openstack-request-id")
    # NOTE(hdd) true for older versions of nova and cinder
    if not req_id:
        req_id = response.headers.get("x-compute-request-id")
    kwargs = {
        "http_status": response.status_code,
        "response": response,
        "method": method,
        "url": url,
        "request_id": req_id,
    }
    if "retry-after" in response.headers:
        kwargs["retry_after"] = response.headers["retry-after"]

    content_type = response.headers.get("Content-Type", "")
    if content_type.startswith("application/json"):
        try:
            body = response.json()
        except ValueError:
            # Malformed JSON body: fall through with no message/details.
            pass
        else:
            if isinstance(body, dict):
                # Error bodies look like {"<type>": {"message": ...}};
                # take the first (normally only) top-level entry.
                error = body.get(list(body)[0])
                if isinstance(error, dict):
                    kwargs["message"] = (error.get("message") or
                                         error.get("faultstring"))
                    kwargs["details"] = (error.get("details") or
                                         six.text_type(body))
    elif content_type.startswith("text/"):
        kwargs["details"] = getattr(response, 'text', '')

    try:
        cls = _code_map[response.status_code]
    except KeyError:
        # 5XX status codes are server errors
        if response.status_code >= http_client.INTERNAL_SERVER_ERROR:
            cls = HttpServerError
        # 4XX status codes are client request errors
        elif (http_client.BAD_REQUEST <= response.status_code <
                http_client.INTERNAL_SERVER_ERROR):
            cls = HTTPClientError
        else:
            cls = HttpError
    return cls(**kwargs)
|
@ -1,249 +0,0 @@
|
||||
# Copyright 2012 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Base utilities to build API operation managers and objects on top of.
|
||||
"""
|
||||
|
||||
import abc
|
||||
import copy
|
||||
import six
|
||||
|
||||
import six.moves.urllib.parse as urlparse
|
||||
|
||||
from ironicclient.common.apiclient import base
|
||||
from ironicclient import exc
|
||||
|
||||
|
||||
def getid(obj):
    """Return ``obj.id`` when present, otherwise ``obj`` itself.

    Abstracts the common pattern of allowing either a resource object
    or its bare ID (UUID) wherever a relationship is referenced.
    """
    return getattr(obj, 'id', obj)
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
class Manager(object):
    """Provides CRUD operations with a particular API."""

    def __init__(self, api):
        # :param api: client object used to issue JSON/raw HTTP requests
        self.api = api

    def _path(self, resource_id=None):
        """Returns a request path for a given resource identifier.

        :param resource_id: Identifier of the resource to generate the request
                            path.
        """
        return ('/v1/%s/%s' % (self._resource_name, resource_id)
                if resource_id else '/v1/%s' % self._resource_name)

    @abc.abstractproperty
    def resource_class(self):
        """The resource class

        """

    @abc.abstractproperty
    def _resource_name(self):
        """The resource name.

        """

    def _get(self, resource_id, fields=None):
        """Retrieve a resource.

        :param resource_id: Identifier of the resource.
        :param fields: List of specific fields to be returned.
        :raises exc.ValidationError: For invalid resource_id arg value.
        """

        if not resource_id:
            raise exc.ValidationError(
                "The identifier argument is invalid. "
                "Value provided: {!r}".format(resource_id))

        if fields is not None:
            # Ask the server to return only the requested fields.
            resource_id = '%s?fields=' % resource_id
            resource_id += ','.join(fields)

        try:
            return self._list(self._path(resource_id))[0]
        except IndexError:
            # Empty result set: the resource does not exist.
            return None

    def _get_as_dict(self, resource_id, fields=None):
        """Retrieve a resource as a dictionary

        :param resource_id: Identifier of the resource.
        :param fields: List of specific fields to be returned.
        :returns: a dictionary representing the resource; may be empty
        """

        resource = self._get(resource_id, fields=fields)
        if resource:
            return resource.to_dict()
        else:
            return {}

    def _format_body_data(self, body, response_key):
        # Extract the payload (optionally nested under response_key)
        # and normalize it to a list.
        if response_key:
            try:
                data = body[response_key]
            except KeyError:
                return []
        else:
            data = body

        if not isinstance(data, list):
            data = [data]

        return data

    def _list_pagination(self, url, response_key=None, obj_class=None,
                         limit=None):
        """Retrieve a list of items.

        The Ironic API is configured to return a maximum number of
        items per request, (see Ironic's api.max_limit option). This
        iterates over the 'next' link (pagination) in the responses,
        to get the number of items specified by 'limit'. If 'limit'
        is None this function will continue pagination until there are
        no more values to be returned.

        :param url: a partial URL, e.g. '/nodes'
        :param response_key: the key to be looked up in response
            dictionary, e.g. 'nodes'
        :param obj_class: class for constructing the returned objects.
        :param limit: maximum number of items to return. If None returns
            everything.

        """
        if obj_class is None:
            obj_class = self.resource_class

        if limit is not None:
            limit = int(limit)

        object_list = []
        object_count = 0
        limit_reached = False
        while url:
            resp, body = self.api.json_request('GET', url)
            data = self._format_body_data(body, response_key)
            for obj in data:
                object_list.append(obj_class(self, obj, loaded=True))
                object_count += 1
                if limit and object_count >= limit:
                    # break the for loop
                    limit_reached = True
                    break

            # break the while loop and return
            if limit_reached:
                break

            url = body.get('next')
            if url:
                # NOTE(lucasagomes): We need to edit the URL to remove
                # the scheme and netloc
                url_parts = list(urlparse.urlparse(url))
                url_parts[0] = url_parts[1] = ''
                url = urlparse.urlunparse(url_parts)

        return object_list

    def _list(self, url, response_key=None, obj_class=None, body=None):
        # Single-request list: fetch the URL and wrap each non-empty
        # item in obj_class (defaults to self.resource_class).
        resp, body = self.api.json_request('GET', url)

        if obj_class is None:
            obj_class = self.resource_class

        data = self._format_body_data(body, response_key)
        return [obj_class(self, res, loaded=True) for res in data if res]

    def _update(self, resource_id, patch, method='PATCH'):
        """Update a resource.

        :param resource_id: Resource identifier.
        :param patch: New version of a given resource.
        :param method: Name of the method for the request.
        """

        url = self._path(resource_id)
        resp, body = self.api.json_request(method, url, body=patch)
        # PATCH/PUT requests may not return a body
        if body:
            return self.resource_class(self, body)

    def _delete(self, resource_id):
        """Delete a resource.

        :param resource_id: Resource identifier.
        """
        self.api.raw_request('DELETE', self._path(resource_id))
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
class CreateManager(Manager):
    """Provides creation operations with a particular API."""

    @abc.abstractproperty
    def _creation_attributes(self):
        """A list of required creation attributes for a resource type.

        """

    def create(self, **kwargs):
        """Create a resource based on a kwargs dictionary of attributes.

        :param kwargs: A dictionary containing the attributes of the resource
                       that will be created.
        :raises exc.InvalidAttribute: For invalid attributes that are not
                                      needed to create the resource.
        """

        new = {}
        invalid = []
        # Partition the supplied attributes into accepted and rejected.
        for (key, value) in kwargs.items():
            if key in self._creation_attributes:
                new[key] = value
            else:
                invalid.append(key)
        if invalid:
            raise exc.InvalidAttribute(
                'The attribute(s) "%(attrs)s" are invalid; they are not '
                'needed to create %(resource)s.' %
                {'resource': self._resource_name,
                 'attrs': '","'.join(invalid)})
        url = self._path()
        resp, body = self.api.json_request('POST', url, body=new)
        # The POST may legitimately return an empty body.
        if body:
            return self.resource_class(self, body)
|
||||
|
||||
|
||||
class Resource(base.Resource):
    """Represents a particular instance of an object (tenant, user, etc).

    This is pretty much just a bag for attributes.
    """

    def to_dict(self):
        # Deep copy so callers cannot mutate the cached attributes.
        return copy.deepcopy(self._info)
|
@ -1,296 +0,0 @@
|
||||
# Copyright 2012 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# W0603: Using the global statement
|
||||
# W0621: Redefining name %s from outer scope
|
||||
# pylint: disable=W0603,W0621
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import getpass
|
||||
import inspect
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
from oslo_utils import encodeutils
|
||||
from oslo_utils import strutils
|
||||
import prettytable
|
||||
import six
|
||||
from six import moves
|
||||
|
||||
from ironicclient.common.i18n import _
|
||||
|
||||
|
||||
class MissingArgs(Exception):
    """Supplied arguments are not sufficient for calling a function."""
    def __init__(self, missing):
        # :param missing: list of names of the missing arguments
        self.missing = missing
        msg = _("Missing arguments: %s") % ", ".join(missing)
        super(MissingArgs, self).__init__(msg)
|
||||
|
||||
|
||||
def validate_args(fn, *args, **kwargs):
    """Check that the supplied args are sufficient for calling a function.

    >>> validate_args(lambda a: None)
    Traceback (most recent call last):
    ...
    MissingArgs: Missing argument(s): a
    >>> validate_args(lambda a, b, c, d: None, 0, c=1)
    Traceback (most recent call last):
    ...
    MissingArgs: Missing argument(s): b, d

    :param fn: the function to check
    :param args: the positional arguments supplied
    :param kwargs: the keyword arguments supplied
    :raises MissingArgs: when required arguments are not supplied
    """
    # inspect.getargspec() was removed in Python 3.11; prefer
    # getfullargspec() and fall back only on Python 2, where it
    # does not exist.  Both return compatible .args/.defaults here.
    try:
        argspec = inspect.getfullargspec(fn)
    except AttributeError:
        argspec = inspect.getargspec(fn)

    num_defaults = len(argspec.defaults or [])
    # Arguments without defaults are the required ones.
    required_args = argspec.args[:len(argspec.args) - num_defaults]

    def isbound(method):
        return getattr(method, '__self__', None) is not None

    if isbound(fn):
        # Bound methods receive ``self`` implicitly; don't require it.
        required_args.pop(0)

    missing = [arg for arg in required_args if arg not in kwargs]
    # Positional args fill required slots in order.
    missing = missing[len(args):]
    if missing:
        raise MissingArgs(missing)
|
||||
|
||||
|
||||
def arg(*args, **kwargs):
    """Decorator for CLI args.

    Example:

    >>> @arg("name", help="Name of the new entity")
    ... def entity_create(args):
    ...     pass
    """
    def _wrapper(fn):
        # Attach the argparse spec to the command function and return
        # it unchanged so decorators stack.
        add_arg(fn, *args, **kwargs)
        return fn
    return _wrapper
|
||||
|
||||
|
||||
def env(*args, **kwargs):
    """Return the value of the first environment variable that is set.

    If every named variable is empty or unset, falls back to
    ``kwargs['default']`` (or ``''`` when no default is given).
    """
    for name in args:
        val = os.environ.get(name)
        if val:
            return val
    return kwargs.get('default', '')
|
||||
|
||||
|
||||
def add_arg(func, *args, **kwargs):
    """Bind CLI arguments to a shell.py `do_foo` function."""

    if not hasattr(func, 'arguments'):
        func.arguments = []

    # NOTE(sirp): avoid dups that can occur when the module is shared across
    # tests.
    entry = (args, kwargs)
    if entry not in func.arguments:
        # Decorators apply bottom-up; prepending keeps positional
        # options in their declared order.
        func.arguments.insert(0, entry)
|
||||
|
||||
|
||||
def unauthenticated(func):
    """Mark the decorated function as callable without authentication.

    Usage:

    >>> @unauthenticated
    ... def mymethod(f):
    ...     pass
    """
    setattr(func, 'unauthenticated', True)
    return func
|
||||
|
||||
|
||||
def isunauthenticated(func):
    """Checks if the function does not require authentication.

    Mark such functions with the `@unauthenticated` decorator.

    :returns: bool
    """
    # Functions never decorated default to requiring authentication.
    return getattr(func, 'unauthenticated', False)
|
||||
|
||||
|
||||
def print_list(objs, fields, formatters=None, sortby_index=0,
               mixed_case_fields=None, field_labels=None, json_flag=False):
    """Print a list of objects or dict as a table, one row per object or dict.

    :param objs: iterable of :class:`Resource` or dicts
    :param fields: attributes that correspond to columns, in order
    :param formatters: `dict` of callables for field formatting
    :param sortby_index: index of the field for sorting table rows; None
        disables sorting
    :param mixed_case_fields: fields corresponding to object attributes that
        have mixed case names (e.g., 'serverId')
    :param field_labels: Labels to use in the heading of the table, default to
        fields.
    :param json_flag: print the list as JSON instead of table
    :raises ValueError: if field_labels has a different length than fields
    """
    def _get_name_and_data(field):
        # A formatter, when present, takes precedence; it receives the whole
        # object, so it can also be used to synthesize derived fields.
        if field in formatters:
            return (field, formatters[field](o))

        field_name = field.replace(' ', '_')
        if field not in mixed_case_fields:
            field_name = field.lower()
        if isinstance(o, dict):
            data = o.get(field_name, '')
        else:
            data = getattr(o, field_name, '')
        return (field_name, data)

    formatters = formatters or {}
    mixed_case_fields = mixed_case_fields or []
    field_labels = field_labels or fields
    if len(field_labels) != len(fields):
        # Interpolate the message before raising: previously the mapping was
        # passed as a second positional argument to ValueError, so the
        # %(...)s placeholders were never substituted.
        raise ValueError(_("Field labels list %(labels)s has different number "
                           "of elements than fields list %(fields)s")
                         % {'labels': field_labels, 'fields': fields})

    if sortby_index is None:
        kwargs = {}
    else:
        kwargs = {'sortby': field_labels[sortby_index]}
    pt = prettytable.PrettyTable(field_labels)
    pt.align = 'l'

    json_array = []

    for o in objs:
        row = []
        for field in fields:
            row.append(_get_name_and_data(field))
        if json_flag:
            json_array.append(dict(row))
        else:
            pt.add_row([r[1] for r in row])

    if json_flag:
        print(json.dumps(json_array, indent=4, separators=(',', ': ')))
    elif six.PY3:
        # safe_encode returns bytes; decode before printing on Python 3
        print(encodeutils.safe_encode(pt.get_string(**kwargs)).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string(**kwargs)))
|
||||
|
||||
|
||||
def print_dict(dct, dict_property="Property", wrap=0, dict_value='Value',
               json_flag=False):
    """Print a `dict` as a table of two columns.

    :param dct: `dict` to print
    :param dict_property: name of the first column
    :param wrap: wrapping for the second column
    :param dict_value: header label for the value (second) column
    :param json_flag: print `dict` as JSON instead of table
    :raises ValueError: if wrap is negative
    """
    if json_flag:
        print(json.dumps(dct, indent=4, separators=(',', ': ')))
        return
    pt = prettytable.PrettyTable([dict_property, dict_value])
    pt.align = 'l'
    # Rows are emitted in sorted key order for deterministic output.
    for k, v in sorted(dct.items()):
        # convert dict to str to check length
        if isinstance(v, dict):
            v = six.text_type(v)
        if wrap > 0:
            v = textwrap.fill(six.text_type(v), wrap)
        elif wrap < 0:
            raise ValueError(_("wrap argument should be a non-negative "
                               "integer"))
        # if value has a newline, add in multiple rows
        # e.g. fault with stacktrace
        # NOTE(review): r'\n' is the literal two-character backslash-n
        # sequence, not a real newline -- presumably matching escaped
        # newlines in serialized fault data; confirm against the producers
        # of these values.
        if v and isinstance(v, six.string_types) and r'\n' in v:
            lines = v.strip().split(r'\n')
            col1 = k
            for line in lines:
                pt.add_row([col1, line])
                # only show the key on the first continuation row
                col1 = ''
        else:
            pt.add_row([k, v])

    if six.PY3:
        # safe_encode returns bytes; decode before printing on Python 3
        print(encodeutils.safe_encode(pt.get_string()).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string()))
|
||||
|
||||
|
||||
def get_password(max_password_prompts=3):
    """Read a password from the controlling TTY.

    When the OS_VERIFY_PASSWORD environment variable is truthy, the user
    is prompted a second time and both entries must match.

    :param max_password_prompts: maximum number of prompt attempts
    :returns: the entered password, or None when stdin is not a TTY, all
        prompts were exhausted, or input was aborted with Ctrl-D
    """
    verify = strutils.bool_from_string(env("OS_VERIFY_PASSWORD"))
    pw = None
    if hasattr(sys.stdin, "isatty") and sys.stdin.isatty():
        # Check for Ctrl-D
        try:
            for __ in moves.range(max_password_prompts):
                pw1 = getpass.getpass("OS Password: ")
                if verify:
                    pw2 = getpass.getpass("Please verify: ")
                else:
                    # no verification requested; accept the single entry
                    pw2 = pw1
                # only accept a non-empty, matching entry
                if pw1 == pw2 and pw1:
                    pw = pw1
                    break
        except EOFError:
            pass
    return pw
|
||||
|
||||
|
||||
def service_type(stype):
    """Decorator that records a service type on the decorated function.

    Usage:

    .. code-block:: python

        @service_type('volume')
        def mymethod(f):
            ...
    """
    def _set(f):
        f.service_type = stype
        return f

    return _set
|
||||
|
||||
|
||||
def get_service_type(f):
    """Return the service type recorded on *f*, or None if absent."""
    try:
        return f.service_type
    except AttributeError:
        return None
|
||||
|
||||
|
||||
def pretty_choice_list(l):
    """Render an iterable as a comma-separated list of quoted items."""
    quoted = ["'%s'" % item for item in l]
    return ', '.join(quoted)
|
||||
|
||||
|
||||
def exit(msg=''):
    """Write *msg* (if non-empty) to stderr, then terminate with status 1."""
    if msg:
        print(msg, file=sys.stderr)
    sys.exit(1)
|
@ -1,103 +0,0 @@
|
||||
#
|
||||
# Copyright 2015 Rackspace, Inc
|
||||
# All Rights Reserved
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import appdirs
|
||||
import dogpile.cache
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)

# Vendor/application names handed to appdirs to derive the per-user
# cache directory.
AUTHOR = 'openstack'
PROGNAME = 'python-ironicclient'

# Module-level dogpile cache region; created lazily by _get_cache().
CACHE = None
CACHE_DIR = appdirs.user_cache_dir(PROGNAME, AUTHOR)
CACHE_EXPIRY_ENV_VAR = 'IRONICCLIENT_CACHE_EXPIRY'  # environment variable
# DBM file holding the cached negotiated API version per host:port.
CACHE_FILENAME = os.path.join(CACHE_DIR, 'ironic-api-version.dbm')
DEFAULT_EXPIRY = 300  # seconds
|
||||
|
||||
|
||||
def _get_cache():
    """Return the module's dogpile cache region, creating it on first use.

    The region is a DBM-file-backed cache stored under the per-user cache
    directory.  Expiry defaults to DEFAULT_EXPIRY seconds and may be
    overridden via the IRONICCLIENT_CACHE_EXPIRY environment variable.

    :returns: the configured (module-global) dogpile cache region
    """
    global CACHE
    if CACHE is None:

        # Ensure cache directory present.  makedirs() can race with another
        # process creating the same directory between the exists() check and
        # the call, so tolerate a directory that already appeared.
        if not os.path.exists(CACHE_DIR):
            try:
                os.makedirs(CACHE_DIR)
            except OSError:
                if not os.path.isdir(CACHE_DIR):
                    raise

        # Use the cache expiry if specified in an env var
        expiry_time = os.environ.get(CACHE_EXPIRY_ENV_VAR, DEFAULT_EXPIRY)
        try:
            expiry_time = int(expiry_time)
        except ValueError:
            # Fall back to the default rather than failing on a bad value.
            LOG.warning("Environment variable %(env_var)s should be an "
                        "integer (not '%(curr_val)s'). Using default "
                        "expiry of %(default)s seconds instead.",
                        {'env_var': CACHE_EXPIRY_ENV_VAR,
                         'curr_val': expiry_time,
                         'default': DEFAULT_EXPIRY})
            expiry_time = DEFAULT_EXPIRY

        CACHE = dogpile.cache.make_region(key_mangler=str).configure(
            'dogpile.cache.dbm',
            expiration_time=expiry_time,
            arguments={
                "filename": CACHE_FILENAME,
            }
        )
    return CACHE
|
||||
|
||||
|
||||
def _build_key(host, port):
|
||||
"""Build a key based upon the hostname or address supplied."""
|
||||
return "%s:%s" % (host, port)
|
||||
|
||||
|
||||
def save_data(host, port, data):
    """Save 'data' for a particular 'host' in the appropriate cache dir.

    :param host: The host that we need to save data for
    :param port: The port on the host that we need to save data for
    :param data: The data we want saved
    """
    key = _build_key(host, port)
    _get_cache().set(key, data)
|
||||
|
||||
|
||||
def retrieve_data(host, port, expiry=None):
    """Retrieve the version stored for an ironic 'host', if it's not stale.

    Check to see if there is valid cached data for the host/port
    combination and return that if it isn't stale.

    :param host: The host that we need to retrieve data for
    :param port: The port on the host that we need to retrieve data for
    :param expiry: The age in seconds before cached data is deemed invalid
    :returns: the cached data, or None when missing or stale
    """
    # Ensure that a cache file exists first
    if not os.path.isfile(CACHE_FILENAME):
        return None

    key = _build_key(host, port)
    data = _get_cache().get(key, expiration_time=expiry)

    # dogpile signals a miss/expired entry with the NO_VALUE sentinel,
    # not None, so translate it for callers.
    if data == dogpile.cache.api.NO_VALUE:
        return None
    return data
|
@ -1,640 +0,0 @@
|
||||
# Copyright 2012 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
from distutils.version import StrictVersion
|
||||
import functools
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
import ssl
|
||||
import textwrap
|
||||
import time
|
||||
|
||||
from keystoneauth1 import adapter
|
||||
from keystoneauth1 import exceptions as kexc
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import strutils
|
||||
import requests
|
||||
import six
|
||||
from six.moves import http_client
|
||||
import six.moves.urllib.parse as urlparse
|
||||
|
||||
from ironicclient.common import filecache
|
||||
from ironicclient.common.i18n import _
|
||||
from ironicclient import exc
|
||||
|
||||
|
||||
# NOTE(deva): Record the latest version that this client was tested with.
# We still have a lot of work to do in the client to implement
# microversion support in the client properly! See
# http://specs.openstack.org/openstack/ironic-specs/specs/kilo/api-microversions.html # noqa
# for full details.
DEFAULT_VER = '1.9'


LOG = logging.getLogger(__name__)
USER_AGENT = 'python-ironicclient'
CHUNKSIZE = 1024 * 64  # 64kB

API_VERSION = '/v1'
# States describing how the effective API version was chosen; validated
# by VersionNegotiationMixin.negotiate_version().
API_VERSION_SELECTED_STATES = ('user', 'negotiated', 'cached', 'default')


# Retry policy defaults used by with_retries() when the client did not
# configure its own values.
DEFAULT_MAX_RETRIES = 5
DEFAULT_RETRY_INTERVAL = 2  # seconds
# Header names whose values are redacted (hashed) in debug logging.
SENSITIVE_HEADERS = ('X-Auth-Token',)


SUPPORTED_ENDPOINT_SCHEME = ('http', 'https')
|
||||
|
||||
|
||||
def _trim_endpoint_api_version(url):
    """Trim API version and trailing slash from endpoint."""
    # str.rstrip() strips any run of the given *characters*, not a suffix:
    # rstrip(API_VERSION) would also eat trailing 'v', '1' or '/' characters
    # from a hostname (e.g. 'http://hostv1' -> 'http://host').  Remove the
    # version suffix explicitly instead.
    url = url.rstrip('/')
    if url.endswith(API_VERSION):
        url = url[:-len(API_VERSION)]
    return url
|
||||
|
||||
|
||||
def _extract_error_json(body):
|
||||
"""Return error_message from the HTTP response body."""
|
||||
error_json = {}
|
||||
try:
|
||||
body_json = jsonutils.loads(body)
|
||||
if 'error_message' in body_json:
|
||||
raw_msg = body_json['error_message']
|
||||
error_json = jsonutils.loads(raw_msg)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
return error_json
|
||||
|
||||
|
||||
def get_server(endpoint):
    """Extract and return the server & port that we're connecting to."""
    if endpoint is None:
        return None, None
    parsed = urlparse.urlparse(endpoint)
    host = parsed.hostname
    # NOTE(review): parsed.port is None when the URL carries no explicit
    # port, which str() renders as the literal 'None' -- confirm callers
    # (e.g. the filecache key) expect that.
    port = str(parsed.port)
    return host, port
|
||||
|
||||
|
||||
class VersionNegotiationMixin(object):
    """Mixin implementing API microversion negotiation with the server.

    Users must provide ``os_ironic_api_version``,
    ``api_version_select_state`` and ``endpoint`` attributes and implement
    ``_parse_version_headers()`` and ``_make_simple_request()``.
    """

    def negotiate_version(self, conn, resp):
        """Negotiate the server version

        Assumption: Called after receiving a 406 error when doing a request.

        :param conn: A connection object
        :param resp: The response object from http request
        :returns: the negotiated API version string
        :raises: RuntimeError if api_version_select_state is invalid
        :raises: exc.UnsupportedVersion when no mutually supported version
            can be found
        """
        if self.api_version_select_state not in API_VERSION_SELECTED_STATES:
            raise RuntimeError(
                _('Error: self.api_version_select_state should be one of the '
                  'values in: "%(valid)s" but had the value: "%(value)s"') %
                {'valid': ', '.join(API_VERSION_SELECTED_STATES),
                 'value': self.api_version_select_state})
        min_ver, max_ver = self._parse_version_headers(resp)
        # NOTE: servers before commit 32fb6e99 did not return version headers
        # on error, so we need to perform a GET to determine
        # the supported version range
        if not max_ver:
            LOG.debug('No version header in response, requesting from server')
            if self.os_ironic_api_version:
                base_version = ("/v%s" %
                                str(self.os_ironic_api_version).split('.')[0])
            else:
                base_version = API_VERSION
            resp = self._make_simple_request(conn, 'GET', base_version)
            min_ver, max_ver = self._parse_version_headers(resp)
        # If the user requested an explicit version or we have negotiated a
        # version and still failing then error now. The server could
        # support the version requested but the requested operation may not
        # be supported by the requested version.
        if self.api_version_select_state == 'user':
            raise exc.UnsupportedVersion(textwrap.fill(
                _("Requested API version %(req)s is not supported by the "
                  "server or the requested operation is not supported by the "
                  "requested version. Supported version range is %(min)s to "
                  "%(max)s")
                % {'req': self.os_ironic_api_version,
                   'min': min_ver, 'max': max_ver}))
        if self.api_version_select_state == 'negotiated':
            raise exc.UnsupportedVersion(textwrap.fill(
                _("No API version was specified and the requested operation "
                  "was not supported by the client's negotiated API version "
                  "%(req)s. Supported version range is: %(min)s to %(max)s")
                % {'req': self.os_ironic_api_version,
                   'min': min_ver, 'max': max_ver}))

        negotiated_ver = str(min(StrictVersion(self.os_ironic_api_version),
                                 StrictVersion(max_ver)))
        # NOTE(review): this compares two *strings*, so ordering is
        # lexicographic, not numeric -- e.g. '1.10' < '1.9' is True.  It
        # looks like StrictVersion objects should be compared instead;
        # confirm before relying on this near double-digit microversions.
        if negotiated_ver < min_ver:
            negotiated_ver = min_ver

        # server handles microversions, but doesn't support
        # the requested version, so try a negotiated version
        self.api_version_select_state = 'negotiated'
        self.os_ironic_api_version = negotiated_ver
        LOG.debug('Negotiated API version is %s', negotiated_ver)

        # Cache the negotiated version for this server
        host, port = get_server(self.endpoint)
        filecache.save_data(host=host, port=port, data=negotiated_ver)

        return negotiated_ver

    def _generic_parse_version_headers(self, accessor_func):
        # Pull the supported microversion range out of Ironic's version
        # headers using the supplied accessor (e.g. headers.get); either
        # bound is None when the header is absent.
        min_ver = accessor_func('X-OpenStack-Ironic-API-Minimum-Version',
                                None)
        max_ver = accessor_func('X-OpenStack-Ironic-API-Maximum-Version',
                                None)
        return min_ver, max_ver

    def _parse_version_headers(self, accessor_func):
        """Return (min_version, max_version) from a response; abstract."""
        # NOTE(jlvillal): Declared for unit testing purposes
        raise NotImplementedError()

    def _make_simple_request(self, conn, method, url):
        """Issue a bare request on *conn*; abstract."""
        # NOTE(jlvillal): Declared for unit testing purposes
        raise NotImplementedError()
|
||||
|
||||
|
||||
# Exception types treated as transient failures worth retrying; consumed
# by with_retries().
_RETRY_EXCEPTIONS = (exc.Conflict, exc.ServiceUnavailable,
                     exc.ConnectionRefused, kexc.RetriableConnectionFailure)
|
||||
|
||||
|
||||
def with_retries(func):
    """Wrapper for _http_request adding support for retries.

    Retries the wrapped call on the transient errors listed in
    _RETRY_EXCEPTIONS, honoring the client's ``conflict_max_retries`` and
    ``conflict_retry_interval`` settings.
    """
    @functools.wraps(func)
    def wrapper(self, url, method, **kwargs):
        # Fall back to module defaults when the client did not configure
        # its own retry policy.
        if self.conflict_max_retries is None:
            self.conflict_max_retries = DEFAULT_MAX_RETRIES
        if self.conflict_retry_interval is None:
            self.conflict_retry_interval = DEFAULT_RETRY_INTERVAL

        num_attempts = self.conflict_max_retries + 1
        attempt = 0
        while True:
            attempt += 1
            try:
                return func(self, url, method, **kwargs)
            except _RETRY_EXCEPTIONS as error:
                msg = ("Error contacting Ironic server: %(error)s. "
                       "Attempt %(attempt)d of %(total)d" %
                       {'attempt': attempt,
                        'total': num_attempts,
                        'error': error})
                if attempt >= num_attempts:
                    # Retries exhausted; surface the last failure.
                    LOG.error(msg)
                    raise
                LOG.debug(msg)
                time.sleep(self.conflict_retry_interval)

    return wrapper
|
||||
|
||||
|
||||
class HTTPClient(VersionNegotiationMixin):
    """Standalone HTTP client for the Ironic API (no keystone session).

    Handles TLS options, token auth, microversion negotiation (via
    VersionNegotiationMixin) and retries on transient errors (via
    with_retries).
    """

    def __init__(self, endpoint, **kwargs):
        """Initialize the client.

        :param endpoint: Ironic API endpoint URL; must use http or https
        :param kwargs: optional settings: token, auth_ref,
            os_ironic_api_version, api_version_select_state, max_retries,
            retry_interval, insecure, ca_file, cert_file, key_file
        :raises: exc.EndpointException on an unsupported URL scheme
        """
        self.endpoint = endpoint
        self.endpoint_trimmed = _trim_endpoint_api_version(endpoint)
        self.auth_token = kwargs.get('token')
        self.auth_ref = kwargs.get('auth_ref')
        self.os_ironic_api_version = kwargs.get('os_ironic_api_version',
                                                DEFAULT_VER)
        self.api_version_select_state = kwargs.get(
            'api_version_select_state', 'default')
        self.conflict_max_retries = kwargs.pop('max_retries',
                                               DEFAULT_MAX_RETRIES)
        self.conflict_retry_interval = kwargs.pop('retry_interval',
                                                  DEFAULT_RETRY_INTERVAL)
        self.session = requests.Session()

        parts = urlparse.urlparse(endpoint)
        if parts.scheme not in SUPPORTED_ENDPOINT_SCHEME:
            msg = _('Unsupported scheme: %s') % parts.scheme
            raise exc.EndpointException(msg)

        if parts.scheme == 'https':
            # 'insecure' wins over a supplied CA file.
            if kwargs.get('insecure') is True:
                self.session.verify = False
            elif kwargs.get('ca_file'):
                self.session.verify = kwargs['ca_file']
            self.session.cert = (kwargs.get('cert_file'),
                                 kwargs.get('key_file'))

    def _process_header(self, name, value):
        """Redacts any sensitive header

        Redact a header that contains sensitive information, by returning an
        updated header with the sha1 hash of that value. The redacted value is
        prefixed by '{SHA1}' because that's the convention used within
        OpenStack.

        :returns: A tuple of (name, value)
                  name: the safe encoding format of name
                  value: the redacted value if name is x-auth-token,
                         or the safe encoding format of name
        """
        if name in SENSITIVE_HEADERS:
            # Hash rather than drop the value so equal tokens remain
            # correlatable across log lines without being disclosed.
            v = value.encode('utf-8')
            h = hashlib.sha1(v)
            d = h.hexdigest()
            return (name, "{SHA1}%s" % d)
        else:
            return (name, value)

    def log_curl_request(self, method, url, kwargs):
        """Log the request as an equivalent curl command (debug level)."""
        curl = ['curl -i -X %s' % method]

        for (key, value) in kwargs['headers'].items():
            header = '-H \'%s: %s\'' % self._process_header(key, value)
            curl.append(header)

        if not self.session.verify:
            # TLS verification disabled -> curl's -k flag
            curl.append('-k')
        elif isinstance(self.session.verify, six.string_types):
            curl.append('--cacert %s' % self.session.verify)

        if self.session.cert:
            curl.append('--cert %s' % self.session.cert[0])
            curl.append('--key %s' % self.session.cert[1])

        if 'body' in kwargs:
            # Mask any passwords that appear in the request body.
            body = strutils.mask_password(kwargs['body'])
            curl.append('-d \'%s\'' % body)

        curl.append(urlparse.urljoin(self.endpoint_trimmed, url))
        LOG.debug(' '.join(curl))

    @staticmethod
    def log_http_response(resp, body=None):
        """Log an HTTP response: status line, headers and optional body."""
        # NOTE(aarefiev): resp.raw is urllib3 response object, it's used
        # only to get 'version', response from request with 'stream = True'
        # should be used for raw reading.
        status = (resp.raw.version / 10.0, resp.status_code, resp.reason)
        dump = ['\nHTTP/%.1f %s %s' % status]
        dump.extend(['%s: %s' % (k, v) for k, v in resp.headers.items()])
        dump.append('')
        if body:
            body = strutils.mask_password(body)
            dump.extend([body, ''])
        LOG.debug('\n'.join(dump))

    def _make_connection_url(self, url):
        # Join the version-trimmed endpoint with the request path.
        return urlparse.urljoin(self.endpoint_trimmed, url)

    def _parse_version_headers(self, resp):
        # VersionNegotiationMixin hook: headers come from a requests response.
        return self._generic_parse_version_headers(resp.headers.get)

    def _make_simple_request(self, conn, method, url):
        # VersionNegotiationMixin hook: conn is a requests.Session here.
        return conn.request(method, self._make_connection_url(url))

    @with_retries
    def _http_request(self, url, method, **kwargs):
        """Send an http request with the specified characteristics.

        Wrapper around request.Session.request to handle tasks such
        as setting headers and error handling.

        :returns: tuple of (response, body iterator)
        :raises: exc.* subclasses mapped from the HTTP status code
        """
        # Copy the kwargs so we can reuse the original in case of redirects
        kwargs['headers'] = copy.deepcopy(kwargs.get('headers', {}))
        kwargs['headers'].setdefault('User-Agent', USER_AGENT)
        if self.os_ironic_api_version:
            kwargs['headers'].setdefault('X-OpenStack-Ironic-API-Version',
                                         self.os_ironic_api_version)
        if self.auth_token:
            kwargs['headers'].setdefault('X-Auth-Token', self.auth_token)

        self.log_curl_request(method, url, kwargs)

        # NOTE(aarefiev): This is for backwards compatibility, request
        # expected body in 'data' field, previously we used httplib,
        # which expected 'body' field.
        body = kwargs.pop('body', None)
        if body:
            kwargs['data'] = body

        conn_url = self._make_connection_url(url)
        try:
            resp = self.session.request(method,
                                        conn_url,
                                        **kwargs)

            # TODO(deva): implement graceful client downgrade when connecting
            # to servers that did not support microversions. Details here:
            # https://specs.openstack.org/openstack/ironic-specs/specs/kilo-implemented/api-microversions.html#use-case-3b-new-client-communicating-with-a-old-ironic-user-specified # noqa

            if resp.status_code == http_client.NOT_ACCEPTABLE:
                # 406: renegotiate the microversion and retry once with
                # the negotiated version header.
                negotiated_ver = self.negotiate_version(self.session, resp)
                kwargs['headers']['X-OpenStack-Ironic-API-Version'] = (
                    negotiated_ver)
                return self._http_request(url, method, **kwargs)

        except requests.exceptions.RequestException as e:
            message = (_("Error has occurred while handling "
                         "request for %(url)s: %(e)s") %
                       dict(url=conn_url, e=e))
            # NOTE(aarefiev): not valid request(invalid url, missing schema,
            # and so on), retrying is not needed.
            if isinstance(e, ValueError):
                raise exc.ValidationError(message)

            raise exc.ConnectionRefused(message)

        body_str = None
        if resp.headers.get('Content-Type') == 'application/octet-stream':
            # Binary payload: hand back a chunked iterator, don't buffer it.
            body_iter = resp.iter_content(chunk_size=CHUNKSIZE)
            self.log_http_response(resp)
        else:
            # Read body into string if it isn't obviously image data
            body_str = resp.text
            self.log_http_response(resp, body_str)
            body_iter = six.StringIO(body_str)

        if resp.status_code >= http_client.BAD_REQUEST:
            error_json = _extract_error_json(body_str)
            # NOTE(vdrok): exceptions from ironic controllers' _lookup methods
            # are constructed directly by pecan instead of wsme, and contain
            # only description field
            raise exc.from_response(
                resp, (error_json.get('faultstring') or
                       error_json.get('description')),
                error_json.get('debuginfo'), method, url)
        elif resp.status_code in (http_client.MOVED_PERMANENTLY,
                                  http_client.FOUND,
                                  http_client.USE_PROXY):
            # Redirected. Reissue the request to the new location.
            # NOTE(review): requests.Response does not support item access;
            # this likely needs resp.headers['location'] (as SessionClient
            # uses resp.headers.get('location')) -- confirm before relying
            # on redirect handling here.
            return self._http_request(resp['location'], method, **kwargs)
        elif resp.status_code == http_client.MULTIPLE_CHOICES:
            raise exc.from_response(resp, method=method, url=url)

        return resp, body_iter

    def json_request(self, method, url, **kwargs):
        """Send a JSON request and decode the JSON response body.

        :returns: tuple of (response, decoded body); the body is an empty
            list when the response carries no content
        """
        kwargs.setdefault('headers', {})
        kwargs['headers'].setdefault('Content-Type', 'application/json')
        kwargs['headers'].setdefault('Accept', 'application/json')

        if 'body' in kwargs:
            kwargs['body'] = jsonutils.dump_as_bytes(kwargs['body'])

        resp, body_iter = self._http_request(url, method, **kwargs)
        content_type = resp.headers.get('Content-Type')

        if (resp.status_code in (http_client.NO_CONTENT,
                                 http_client.RESET_CONTENT)
                or content_type is None):
            return resp, list()

        if 'application/json' in content_type:
            body = ''.join([chunk for chunk in body_iter])
            try:
                body = jsonutils.loads(body)
            except ValueError:
                LOG.error('Could not decode response body as JSON')
        else:
            body = None

        return resp, body

    def raw_request(self, method, url, **kwargs):
        """Send a request with an octet-stream content type."""
        kwargs.setdefault('headers', {})
        kwargs['headers'].setdefault('Content-Type',
                                     'application/octet-stream')
        return self._http_request(url, method, **kwargs)
|
||||
|
||||
|
||||
class VerifiedHTTPSConnection(six.moves.http_client.HTTPSConnection):
    """httplib-compatible connection using client-side SSL authentication

    :see http://code.activestate.com/recipes/
            577548-https-httplib-client-connection-with-certificate-v/
    """

    def __init__(self, host, port, key_file=None, cert_file=None,
                 ca_file=None, timeout=None, insecure=False):
        """Set up the connection's TLS parameters.

        :param host: server hostname
        :param port: server port
        :param key_file: client private key path
        :param cert_file: client certificate path
        :param ca_file: CA bundle path; falls back to a system default
        :param timeout: socket timeout in seconds
        :param insecure: when True, skip server certificate verification
        """
        six.moves.http_client.HTTPSConnection.__init__(self, host, port,
                                                       key_file=key_file,
                                                       cert_file=cert_file)
        self.key_file = key_file
        self.cert_file = cert_file
        if ca_file is not None:
            self.ca_file = ca_file
        else:
            # No CA supplied: probe well-known system locations.
            self.ca_file = self.get_system_ca_file()
        self.timeout = timeout
        self.insecure = insecure

    def connect(self):
        """Connect to a host on a given (SSL) port.

        If ca_file is pointing somewhere, use it to check Server Certificate.

        Redefined/copied and extended from httplib.py:1105 (Python 2.6.x).
        This is needed to pass cert_reqs=ssl.CERT_REQUIRED as parameter to
        ssl.wrap_socket(), which forces SSL to check server certificate against
        our client certificate.
        """
        sock = socket.create_connection((self.host, self.port), self.timeout)

        if self._tunnel_host:
            # HTTP CONNECT proxying: tunnel first, then wrap with TLS.
            self.sock = sock
            self._tunnel()

        if self.insecure is True:
            kwargs = {'cert_reqs': ssl.CERT_NONE}
        else:
            kwargs = {'cert_reqs': ssl.CERT_REQUIRED, 'ca_certs': self.ca_file}

        if self.cert_file:
            kwargs['certfile'] = self.cert_file
        if self.key_file:
            kwargs['keyfile'] = self.key_file

        # NOTE(review): ssl.wrap_socket() is deprecated and removed in
        # Python 3.12; an ssl.SSLContext would be the modern replacement,
        # but switching changes verification behavior -- left as-is.
        self.sock = ssl.wrap_socket(sock, **kwargs)

    @staticmethod
    def get_system_ca_file():
        """Return path to system default CA file."""
        # Standard CA file locations for Debian/Ubuntu, RedHat/Fedora,
        # Suse, FreeBSD/OpenBSD
        ca_path = ['/etc/ssl/certs/ca-certificates.crt',
                   '/etc/pki/tls/certs/ca-bundle.crt',
                   '/etc/ssl/ca-bundle.pem',
                   '/etc/ssl/cert.pem']
        for ca in ca_path:
            if os.path.exists(ca):
                return ca
        return None
|
||||
|
||||
|
||||
class SessionClient(VersionNegotiationMixin, adapter.LegacyJsonAdapter):
    """HTTP client based on Keystone client session."""

    def __init__(self,
                 os_ironic_api_version,
                 api_version_select_state,
                 max_retries,
                 retry_interval,
                 endpoint,
                 **kwargs):
        """Initialize the session-backed client.

        :param os_ironic_api_version: requested microversion string
        :param api_version_select_state: how the version was chosen; one of
            API_VERSION_SELECTED_STATES
        :param max_retries: retry count for transient failures
        :param retry_interval: seconds between retries
        :param endpoint: Ironic API endpoint URL
        :param kwargs: forwarded to adapter.LegacyJsonAdapter
        """
        self.os_ironic_api_version = os_ironic_api_version
        self.api_version_select_state = api_version_select_state
        self.conflict_max_retries = max_retries
        self.conflict_retry_interval = retry_interval
        self.endpoint = endpoint

        super(SessionClient, self).__init__(**kwargs)

    def _parse_version_headers(self, resp):
        # VersionNegotiationMixin hook: headers come from the response.
        return self._generic_parse_version_headers(resp.headers.get)

    def _make_simple_request(self, conn, method, url):
        # NOTE: conn is self.session for this class
        return conn.request(url, method, raise_exc=False)

    @with_retries
    def _http_request(self, url, method, **kwargs):
        """Send a request through the keystone session.

        Handles microversion renegotiation on 406, error mapping for
        >=400 responses, and redirect following.

        :returns: the requests response object
        :raises: exc.* subclasses mapped from the HTTP status code
        """
        kwargs.setdefault('user_agent', USER_AGENT)
        kwargs.setdefault('auth', self.auth)
        if isinstance(self.endpoint_override, six.string_types):
            # Only trim the endpoint override when one is actually set.
            kwargs.setdefault(
                'endpoint_override',
                _trim_endpoint_api_version(self.endpoint_override)
            )

        if getattr(self, 'os_ironic_api_version', None):
            # NOTE(review): assumes callers always supply a 'headers' dict
            # (json_request/raw_request do); a direct call without one
            # would raise KeyError here -- confirm.
            kwargs['headers'].setdefault('X-OpenStack-Ironic-API-Version',
                                         self.os_ironic_api_version)

        endpoint_filter = kwargs.setdefault('endpoint_filter', {})
        endpoint_filter.setdefault('interface', self.interface)
        endpoint_filter.setdefault('service_type', self.service_type)
        endpoint_filter.setdefault('region_name', self.region_name)

        resp = self.session.request(url, method,
                                    raise_exc=False, **kwargs)
        if resp.status_code == http_client.NOT_ACCEPTABLE:
            # 406: renegotiate the microversion and retry with the
            # negotiated version header.
            negotiated_ver = self.negotiate_version(self.session, resp)
            kwargs['headers']['X-OpenStack-Ironic-API-Version'] = (
                negotiated_ver)
            return self._http_request(url, method, **kwargs)
        if resp.status_code >= http_client.BAD_REQUEST:
            error_json = _extract_error_json(resp.content)
            # NOTE(vdrok): exceptions from ironic controllers' _lookup methods
            # are constructed directly by pecan instead of wsme, and contain
            # only description field
            raise exc.from_response(resp, (error_json.get('faultstring') or
                                           error_json.get('description')),
                                    error_json.get('debuginfo'), method, url)
        elif resp.status_code in (http_client.MOVED_PERMANENTLY,
                                  http_client.FOUND, http_client.USE_PROXY):
            # Redirected. Reissue the request to the new location.
            location = resp.headers.get('location')
            resp = self._http_request(location, method, **kwargs)
        elif resp.status_code == http_client.MULTIPLE_CHOICES:
            raise exc.from_response(resp, method=method, url=url)
        return resp

    def json_request(self, method, url, **kwargs):
        """Send a JSON request and decode the JSON response body.

        :returns: tuple of (response, decoded body); the body is an empty
            list when the response carries no content
        """
        kwargs.setdefault('headers', {})
        kwargs['headers'].setdefault('Content-Type', 'application/json')
        kwargs['headers'].setdefault('Accept', 'application/json')

        if 'body' in kwargs:
            kwargs['data'] = jsonutils.dump_as_bytes(kwargs.pop('body'))

        resp = self._http_request(url, method, **kwargs)
        body = resp.content
        content_type = resp.headers.get('content-type', None)
        status = resp.status_code
        if (status in (http_client.NO_CONTENT, http_client.RESET_CONTENT) or
                content_type is None):
            return resp, list()
        if 'application/json' in content_type:
            try:
                body = resp.json()
            except ValueError:
                LOG.error('Could not decode response body as JSON')
        else:
            body = None

        return resp, body

    def raw_request(self, method, url, **kwargs):
        """Send a request with an octet-stream content type."""
        kwargs.setdefault('headers', {})
        kwargs['headers'].setdefault('Content-Type',
                                     'application/octet-stream')
        return self._http_request(url, method, **kwargs)
|
||||
|
||||
|
||||
def _construct_http_client(endpoint=None,
                           session=None,
                           token=None,
                           auth_ref=None,
                           os_ironic_api_version=DEFAULT_VER,
                           api_version_select_state='default',
                           max_retries=DEFAULT_MAX_RETRIES,
                           retry_interval=DEFAULT_RETRY_INTERVAL,
                           timeout=600,
                           ca_file=None,
                           cert_file=None,
                           key_file=None,
                           insecure=None,
                           **kwargs):
    """Build a client for the Ironic API.

    Returns a SessionClient when a keystoneauth *session* is supplied
    (in which case the standalone connection arguments are ignored and
    a warning is logged for any that were set), otherwise an HTTPClient
    built from the individual connection arguments.

    :param endpoint: Ironic API endpoint (used as endpoint_override with
        a session).
    :param session: keystoneauth session object, or None.
    :param token: auth token (HTTPClient only).
    :param auth_ref: keystone auth reference (HTTPClient only).
    :param os_ironic_api_version: requested API microversion string.
    :param api_version_select_state: how the version was chosen
        ('default', 'user', ...).
    :param max_retries: retry count for retriable failures.
    :param retry_interval: seconds between retries.
    :param timeout: request timeout in seconds (HTTPClient only).
    :param ca_file: CA bundle path (HTTPClient only).
    :param cert_file: client cert path (HTTPClient only).
    :param key_file: client key path (HTTPClient only).
    :param insecure: skip TLS verification (HTTPClient only).
    :param kwargs: extra arguments forwarded to the chosen client class.
    """
    if session:
        kwargs.setdefault('service_type', 'baremetal')
        kwargs.setdefault('user_agent', 'python-ironicclient')
        # 'endpoint_type' is the legacy name for 'interface'.
        kwargs.setdefault('interface', kwargs.pop('endpoint_type', None))
        kwargs.setdefault('endpoint_override', endpoint)

        # These options only make sense without a session; warn if set.
        # 'timeout' is flagged only when changed from its default of 600.
        ignored = {'token': token,
                   'auth_ref': auth_ref,
                   'timeout': timeout != 600,
                   'ca_file': ca_file,
                   'cert_file': cert_file,
                   'key_file': key_file,
                   'insecure': insecure}

        dvars = [k for k, v in ignored.items() if v]

        if dvars:
            LOG.warning('The following arguments are ignored when using '
                        'the session to construct a client: %s',
                        ', '.join(dvars))

        return SessionClient(session=session,
                             os_ironic_api_version=os_ironic_api_version,
                             api_version_select_state=api_version_select_state,
                             max_retries=max_retries,
                             retry_interval=retry_interval,
                             endpoint=endpoint,
                             **kwargs)
    else:
        if kwargs:
            # Bug fix: the closing paren used to sit before ', '.join(kwargs),
            # so the message was logged with a literal '%s' and the joined
            # argument names were discarded.
            LOG.warning('The following arguments are being ignored when '
                        'constructing the client: %s', ', '.join(kwargs))

        return HTTPClient(endpoint=endpoint,
                          token=token,
                          auth_ref=auth_ref,
                          os_ironic_api_version=os_ironic_api_version,
                          api_version_select_state=api_version_select_state,
                          max_retries=max_retries,
                          retry_interval=retry_interval,
                          timeout=timeout,
                          ca_file=ca_file,
                          cert_file=cert_file,
                          key_file=key_file,
                          insecure=insecure)
|
@ -1,21 +0,0 @@
|
||||
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import oslo_i18n

# Module-wide translator factory bound to the 'ironicclient' gettext domain.
_translators = oslo_i18n.TranslatorFactory(domain='ironicclient')

# The primary translation function using the well-known name "_"
_ = _translators.primary
|
@ -1,393 +0,0 @@
|
||||
# Copyright 2012 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import contextlib
|
||||
import gzip
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from oslo_serialization import base64
|
||||
from oslo_utils import strutils
|
||||
import six
|
||||
|
||||
from ironicclient.common.i18n import _
|
||||
from ironicclient import exc
|
||||
|
||||
|
||||
class HelpFormatter(argparse.HelpFormatter):
    """Argparse help formatter that capitalizes section headings."""

    def start_section(self, heading):
        capitalized = heading.capitalize()
        super(HelpFormatter, self).start_section(capitalized)
|
||||
|
||||
|
||||
def define_command(subparsers, command, callback, cmd_mapper):
    """Define a command in the subparsers collection.

    :param subparsers: subparsers collection where the command will go
    :param command: command name
    :param callback: function that will be used to process the command
    :param cmd_mapper: dict mapping command names to their subparsers
    """
    docstring = callback.__doc__ or ''
    # First docstring line doubles as the short help text.
    summary = docstring.strip().split('\n')[0]
    arguments = getattr(callback, 'arguments', [])

    subparser = subparsers.add_parser(command,
                                      help=summary,
                                      description=docstring,
                                      add_help=False,
                                      formatter_class=HelpFormatter)
    subparser.add_argument('-h', '--help', action='help',
                           help=argparse.SUPPRESS)
    cmd_mapper[command] = subparser
    required_args = subparser.add_argument_group(_("Required arguments"))

    # Required options go into their own group so help output separates them.
    for (args, kwargs) in arguments:
        target = required_args if kwargs.get('required') else subparser
        target.add_argument(*args, **kwargs)
    subparser.set_defaults(func=callback)
|
||||
|
||||
|
||||
def define_commands_from_module(subparsers, command_module, cmd_mapper):
    """Add *do_* methods in a module and add as commands into a subparsers."""
    for attr_name in dir(command_module):
        if not attr_name.startswith('do_'):
            continue
        # Commands are exposed hyphen-separated instead of underscored.
        command_name = attr_name[3:].replace('_', '-')
        handler = getattr(command_module, attr_name)
        define_command(subparsers, command_name, handler, cmd_mapper)
|
||||
|
||||
|
||||
def split_and_deserialize(string):
    """Split and try to JSON deserialize a string.

    Gets a string with the KEY=VALUE format, split it (using '=' as the
    separator) and try to JSON deserialize the VALUE.

    :returns: A tuple of (key, value).
    :raises CommandError: if the string contains no '=' separator.
    """
    parts = string.split("=", 1)
    if len(parts) != 2:
        raise exc.CommandError(_('Attributes must be a list of '
                                 'PATH=VALUE not "%s"') % string)
    key, value = parts
    try:
        value = json.loads(value)
    except ValueError:
        # Not valid JSON: keep the raw string value.
        pass

    return (key, value)
|
||||
|
||||
|
||||
def key_value_pairs_to_dict(key_value_pairs):
    """Convert a list of key-value pairs to a dictionary.

    :param key_value_pairs: a list of strings, each string is in the form
        <key>=<value>
    :returns: a dictionary, possibly empty
    """
    if not key_value_pairs:
        return {}
    return dict(map(split_and_deserialize, key_value_pairs))
|
||||
|
||||
|
||||
def args_array_to_dict(kwargs, key_to_convert):
    """Convert the value in a dictionary entry to a dictionary.

    From the kwargs dictionary, converts the value of the key_to_convert
    entry from a list of key-value pairs to a dictionary.

    :param kwargs: a dictionary
    :param key_to_convert: the key (in kwargs), whose value is expected to
        be a list of key=value strings. This value will be converted to a
        dictionary.
    :returns: kwargs, the (modified) dictionary
    """
    pairs = kwargs.get(key_to_convert)
    if pairs:
        kwargs[key_to_convert] = key_value_pairs_to_dict(pairs)
    return kwargs
|
||||
|
||||
|
||||
def args_array_to_patch(op, attributes):
    """Convert attribute strings into a JSON-patch style list of dicts.

    :param op: patch operation: 'add', 'replace' or 'remove'.
    :param attributes: list of strings; 'PATH=VALUE' for add/replace,
        bare 'PATH' for remove.
    :returns: list of {'op': ..., 'path': ..., 'value': ...} dicts
        ('value' omitted for remove).
    :raises CommandError: for an unknown operation.
    """
    patch = []
    for attribute in attributes:
        # JSON-patch paths always begin with a slash.
        path = attribute if attribute.startswith('/') else '/' + attribute

        if op in ('add', 'replace'):
            path, value = split_and_deserialize(path)
            patch.append({'op': op, 'path': path, 'value': value})
        elif op == "remove":
            # For remove only the key is needed.
            patch.append({'op': op, 'path': path})
        else:
            raise exc.CommandError(_('Unknown PATCH operation: %s') % op)
    return patch
|
||||
|
||||
|
||||
def convert_list_props_to_comma_separated(data, props=None):
    """Convert the list-type properties to comma-separated strings

    :param data: the input dict object.
    :param props: the properties whose values will be converted.
        Default to None to convert all list-type properties of the input.
    :returns: the result dict instance.
    """
    result = dict(data)
    keys = data.keys() if props is None else props

    for key in keys:
        value = data.get(key, None)
        if isinstance(value, list):
            result[key] = ', '.join(six.text_type(item) for item in value)

    return result
|
||||
|
||||
|
||||
def common_params_for_list(args, fields, field_labels):
    """Generate 'params' dict that is common for every 'list' command.

    :param args: arguments from command line.
    :param fields: possible fields for sorting.
    :param field_labels: possible field labels for sorting.
    :returns: a dict with params to pass to the client method.
    :raises CommandError: for a negative limit, an unknown sort key, or
        an invalid sort direction.
    """
    params = {}
    if args.marker is not None:
        params['marker'] = args.marker
    if args.limit is not None:
        if args.limit < 0:
            raise exc.CommandError(
                _('Expected non-negative --limit, got %s') % args.limit)
        params['limit'] = args.limit

    if args.sort_key is not None:
        # Accept either the displayed column label or the raw field name.
        fields_map = dict(zip(field_labels, fields))
        fields_map.update(zip(fields, fields))
        if args.sort_key not in fields_map:
            raise exc.CommandError(
                _("%(sort_key)s is an invalid field for sorting, "
                  "valid values for --sort-key are: %(valid)s") %
                {'sort_key': args.sort_key,
                 'valid': list(fields_map)})
        params['sort_key'] = fields_map[args.sort_key]
    if args.sort_dir is not None:
        if args.sort_dir not in ('asc', 'desc'):
            raise exc.CommandError(
                _("%s is an invalid value for sort direction, "
                  "valid values for --sort-dir are: 'asc', 'desc'") %
                args.sort_dir)
        params['sort_dir'] = args.sort_dir

    params['detail'] = args.detail

    # args.fields is a list of lists (argparse append + nargs).
    requested_fields = args.fields[0] if args.fields else None
    if requested_fields is not None:
        params['fields'] = requested_fields

    return params
|
||||
|
||||
|
||||
def common_filters(marker=None, limit=None, sort_key=None, sort_dir=None,
                   fields=None, detail=False):
    """Generate common filters for any list request.

    :param marker: entity ID from which to start returning entities.
    :param limit: maximum number of entities to return.
    :param sort_key: field to use for sorting.
    :param sort_dir: direction of sorting: 'asc' or 'desc'.
    :param fields: a list with a specified set of fields of the resource
        to be returned.
    :param detail: Boolean, True to return detailed information. This
        parameter can be used for resources which accept 'detail' as a
        URL parameter.
    :returns: list of string filters.
    """
    filters = []
    # A non-positive or non-int limit means "no limit filter".
    if isinstance(limit, int) and limit > 0:
        filters.append('limit=%s' % limit)
    for name, value in (('marker', marker),
                        ('sort_key', sort_key),
                        ('sort_dir', sort_dir)):
        if value is not None:
            filters.append('%s=%s' % (name, value))
    if fields is not None:
        filters.append('fields=%s' % ','.join(fields))
    if detail:
        filters.append('detail=True')
    return filters
|
||||
|
||||
|
||||
@contextlib.contextmanager
def tempdir(*args, **kwargs):
    """Yield a freshly created temporary directory, removing it on exit.

    Arguments are forwarded to tempfile.mkdtemp.
    """
    path = tempfile.mkdtemp(*args, **kwargs)
    try:
        yield path
    finally:
        # Always clean up, even if the body raised.
        shutil.rmtree(path)
|
||||
|
||||
|
||||
def make_configdrive(path):
    """Make the config drive file.

    Builds an ISO 9660 image from the given directory with the external
    'genisoimage' tool, gzips it, and returns the result base64-encoded.

    :param path: The directory containing the config drive files.
    :returns: A gzipped and base64 encoded configdrive string (bytes).
    :raises CommandError: if the directory is unreadable, genisoimage is
        missing, or genisoimage exits non-zero.
    """
    # Make sure path it's readable
    if not os.access(path, os.R_OK):
        raise exc.CommandError(_('The directory "%s" is not readable') % path)

    with tempfile.NamedTemporaryFile() as tmpfile:
        with tempfile.NamedTemporaryFile() as tmpzipfile:
            publisher = 'ironicclient-configdrive 0.1'
            try:
                p = subprocess.Popen(['genisoimage', '-o', tmpfile.name,
                                      '-ldots', '-allow-lowercase',
                                      '-allow-multidot', '-l',
                                      '-publisher', publisher,
                                      '-quiet', '-J',
                                      '-r', '-V', 'config-2',
                                      path],
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
            except OSError as e:
                raise exc.CommandError(
                    _('Error generating the config drive. Make sure the '
                      '"genisoimage" tool is installed. Error: %s') % e)

            stdout, stderr = p.communicate()
            if p.returncode != 0:
                # Bug fix: the two message fragments used to concatenate as
                # "config drive.Stdout:" with no separating space.
                raise exc.CommandError(
                    _('Error generating the config drive. '
                      'Stdout: "%(stdout)s". Stderr: %(stderr)s') %
                    {'stdout': stdout, 'stderr': stderr})

            # Compress file
            tmpfile.seek(0)
            g = gzip.GzipFile(fileobj=tmpzipfile, mode='wb')
            shutil.copyfileobj(tmpfile, g)
            g.close()

            tmpzipfile.seek(0)
            return base64.encode_as_bytes(tmpzipfile.read())
|
||||
|
||||
|
||||
def check_empty_arg(arg, arg_descriptor):
    """Raise CommandError when *arg* is empty or only whitespace.

    :param arg: the string value to validate.
    :param arg_descriptor: name used in the error message.
    """
    if arg.strip():
        return
    raise exc.CommandError(_('%(arg)s cannot be empty or only have blank'
                             ' spaces') % {'arg': arg_descriptor})
|
||||
|
||||
|
||||
def bool_argument_value(arg_name, bool_str, strict=True, default=False):
    """Returns the Boolean represented by bool_str.

    Returns the Boolean value for the argument named arg_name. The value is
    represented by the string bool_str. If the string is an invalid Boolean
    string: if strict is True, a CommandError exception is raised; otherwise
    the default value is returned.

    :param arg_name: The name of the argument
    :param bool_str: The string representing a Boolean value
    :param strict: Used if the string is invalid. If True, raises an
        exception. If False, returns the default value.
    :param default: The default value to return if the string is invalid
        and not strict
    :returns: the Boolean value represented by bool_str or the default value
        if bool_str is invalid and strict is False
    :raises CommandError: if bool_str is an invalid Boolean string
    """
    try:
        return strutils.bool_from_string(bool_str, strict, default)
    except ValueError as e:
        # Re-raise as a CLI-level error naming the offending argument.
        raise exc.CommandError(_("argument %(arg)s: %(err)s.")
                               % {'arg': arg_name, 'err': e})
|
||||
|
||||
|
||||
def check_for_invalid_fields(fields, valid_fields):
    """Check for invalid fields.

    :param fields: A list of fields specified by the user.
    :param valid_fields: A list of valid fields.
    :raises CommandError: If invalid fields were specified by the user.
    """
    if not fields:
        return

    unknown = set(fields) - set(valid_fields)
    if unknown:
        raise exc.CommandError(
            _('Invalid field(s) requested: %(invalid)s. Valid fields '
              'are: %(valid)s.') % {'invalid': ', '.join(unknown),
                                    'valid': ', '.join(valid_fields)})
|
||||
|
||||
|
||||
def get_from_stdin(info_desc):
    """Read information from stdin.

    :param info_desc: A string description of the desired information
    :raises: InvalidAttribute if there was a problem reading from stdin
    :returns: the string that was read from stdin, stripped of surrounding
        whitespace
    """
    try:
        data = sys.stdin.read()
    except Exception as e:
        err = _("Cannot get %(desc)s from standard input. Error: %(err)s")
        raise exc.InvalidAttribute(err % {'desc': info_desc, 'err': e})
    return data.strip()
|
||||
|
||||
|
||||
def handle_json_or_file_arg(json_arg):
    """Attempts to read JSON argument from file or string.

    :param json_arg: May be a file name containing the JSON, or
        a JSON string.
    :returns: A list or dictionary parsed from JSON.
    :raises: InvalidAttribute if the argument cannot be parsed.
    """
    # If the argument names an existing file, read the JSON from it first.
    if os.path.isfile(json_arg):
        try:
            with open(json_arg, 'r') as f:
                json_arg = f.read().strip()
        except Exception as e:
            err = _("Cannot get JSON from file '%(file)s'. "
                    "Error: %(err)s") % {'err': e, 'file': json_arg}
            raise exc.InvalidAttribute(err)
    try:
        return json.loads(json_arg)
    except ValueError as e:
        err = (_("For JSON: '%(string)s', error: '%(err)s'") %
               {'err': e, 'string': json_arg})
        raise exc.InvalidAttribute(err)
|
@ -1,71 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironicclient.common.apiclient import exceptions
from ironicclient.common.apiclient.exceptions import *  # noqa


# NOTE(akurilin): This alias is left here since v.0.1.3 to support backwards
# compatibility.
InvalidEndpoint = EndpointException
CommunicationError = ConnectionRefused
HTTPBadRequest = BadRequest
HTTPInternalServerError = InternalServerError
HTTPNotFound = NotFound
HTTPServiceUnavailable = ServiceUnavailable


class AmbiguousAuthSystem(ClientException):
    """Could not obtain token and endpoint using provided credentials."""
    pass

# Alias for backwards compatibility (preserves the original misspelling).
AmbigiousAuthSystem = AmbiguousAuthSystem


class InvalidAttribute(ClientException):
    """A user-supplied attribute could not be parsed or is invalid."""
    pass


class StateTransitionFailed(ClientException):
    """Failed to reach a requested provision state."""


class StateTransitionTimeout(ClientException):
    """Timed out while waiting for a requested provision state."""
|
||||
|
||||
|
||||
def from_response(response, message=None, traceback=None, method=None,
                  url=None):
    """Return an HttpError instance based on response from httplib/requests.

    :param response: response object from either `requests` or
        `httplib`/`httplib2`; the latter is adapted in place below.
    :param message: optional fault string to embed in the error body.
    :param traceback: optional debug info to embed as 'details'.
    :param method: HTTP method used for the failed request.
    :param url: URL of the failed request.
    """

    error_body = {}
    if message:
        error_body['message'] = message
    if traceback:
        error_body['details'] = traceback

    if hasattr(response, 'status') and not hasattr(response, 'status_code'):
        # NOTE(akurilin): These modifications around response object give
        # ability to get all necessary information in method `from_response`
        # from common code, which expecting response object from `requests`
        # library instead of object from `httplib/httplib2` library.
        response.status_code = response.status
        response.headers = {
            'Content-Type': response.getheader('content-type', "")}

    if hasattr(response, 'status_code'):
        # NOTE(jiangfei): These modifications allow SessionClient
        # to handle faultstring.
        response.json = lambda: {'error': error_body}

    return exceptions.from_response(response, method=method, url=url)
|
@ -1,105 +0,0 @@
|
||||
#
|
||||
# Copyright 2015 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
"""OpenStackClient plugin for Bare Metal service."""
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
from ironicclient.common import http
|
||||
from osc_lib import utils
|
||||
|
||||
LOG = logging.getLogger(__name__)

# Name of the ClientManager option that carries the chosen API version.
API_VERSION_OPTION = 'os_baremetal_api_version'
API_NAME = 'baremetal'
# Highest microversion this plugin knows about; used to expand 'latest'.
LAST_KNOWN_API_VERSION = 34
# Map every supported '1.N' microversion string to the v1 client class path.
API_VERSIONS = {
    '1.%d' % i: 'ironicclient.v1.client.Client'
    for i in range(1, LAST_KNOWN_API_VERSION + 1)
}
API_VERSIONS['1'] = API_VERSIONS[http.DEFAULT_VER]
# Set to True when the user passes --os-baremetal-api-version explicitly.
OS_BAREMETAL_API_VERSION_SPECIFIED = False
MISSING_VERSION_WARNING = (
    "You are using the default API version of the OpenStack CLI baremetal "
    "(ironic) plugin. This is currently API version %s. In the future, "
    "the default will be the latest API version understood by both API "
    "and CLI. You can preserve the current behavior by passing the "
    "--os-baremetal-api-version argument with the desired version or using "
    "the OS_BAREMETAL_API_VERSION environment variable."
)
|
||||
|
||||
|
||||
def make_client(instance):
    """Returns a baremetal service client.

    :param instance: an osc-lib ClientManager holding the session, region
        and the API version selected for this plugin.
    """
    # Warn when the version was neither given on the CLI nor via the
    # environment, since the default will change in a future release.
    if (not OS_BAREMETAL_API_VERSION_SPECIFIED and not
            utils.env('OS_BAREMETAL_API_VERSION')):
        LOG.warning(MISSING_VERSION_WARNING, http.DEFAULT_VER)

    baremetal_client_class = utils.get_client_class(
        API_NAME,
        instance._api_version[API_NAME],
        API_VERSIONS)
    LOG.debug('Instantiating baremetal client: %s', baremetal_client_class)
    LOG.debug('Baremetal API version: %s', http.DEFAULT_VER)

    client = baremetal_client_class(
        os_ironic_api_version=instance._api_version[API_NAME],
        session=instance.session,
        region_name=instance._region_name,
        # NOTE(vdrok): This will be set as endpoint_override, and the Client
        # class will be able to do the version stripping if needed
        endpoint=instance.get_endpoint_for_service_type(
            API_NAME, interface=instance.interface,
            region_name=instance._region_name
        )
    )
    return client
|
||||
|
||||
|
||||
def build_option_parser(parser):
    """Hook to add global options."""
    parser.add_argument(
        '--os-baremetal-api-version',
        metavar='<baremetal-api-version>',
        # Fall back to the environment, then to the library default.
        default=utils.env(
            'OS_BAREMETAL_API_VERSION',
            default=http.DEFAULT_VER),
        # Offer all known microversions sorted numerically, plus 'latest'.
        choices=sorted(
            API_VERSIONS,
            key=lambda k: [int(x) for x in k.split('.')]) + ['latest'],
        # Custom action records that the user chose a version explicitly
        # and expands the 'latest' keyword.
        action=ReplaceLatestVersion,
        help='Baremetal API version, default=' +
             http.DEFAULT_VER +
             ' (Env: OS_BAREMETAL_API_VERSION). '
             'Use "latest" for the latest known API version. '
             'The default value will change to "latest" in the Queens '
             'release.',
    )
    return parser
|
||||
|
||||
|
||||
class ReplaceLatestVersion(argparse.Action):
    """Replaces `latest` keyword by last known version."""

    def __call__(self, parser, namespace, values, option_string=None):
        # Record module-wide that the user specified a version explicitly,
        # so make_client can skip the missing-version warning.
        global OS_BAREMETAL_API_VERSION_SPECIFIED
        OS_BAREMETAL_API_VERSION_SPECIFIED = True
        latest = values == 'latest'
        if latest:
            # Expand the keyword into the newest microversion we know of.
            values = '1.%d' % LAST_KNOWN_API_VERSION
            LOG.debug("Replacing 'latest' API version with the "
                      "latest known version '%s'", values)
        setattr(namespace, self.dest, values)
|
@ -1,325 +0,0 @@
|
||||
#
|
||||
# Copyright 2016 Intel Corporation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import itertools
|
||||
import logging
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as oscutils
|
||||
|
||||
from ironicclient.common.i18n import _
|
||||
from ironicclient.common import utils
|
||||
from ironicclient import exc
|
||||
from ironicclient.v1 import resource_fields as res_fields
|
||||
|
||||
|
||||
class CreateBaremetalChassis(command.ShowOne):
    """Create a new chassis."""

    log = logging.getLogger(__name__ + ".CreateBaremetalChassis")

    def get_parser(self, prog_name):
        """Build the argument parser for 'baremetal chassis create'."""
        parser = super(CreateBaremetalChassis, self).get_parser(prog_name)

        parser.add_argument(
            '--description',
            dest='description',
            metavar='<description>',
            help=_('Description for the chassis')
        )
        parser.add_argument(
            '--extra',
            metavar='<key=value>',
            action='append',
            help=_("Record arbitrary key/value metadata. "
                   "Can be specified multiple times.")
        )
        parser.add_argument(
            '--uuid',
            metavar='<uuid>',
            help=_("Unique UUID of the chassis")
        )

        return parser

    def take_action(self, parsed_args):
        """Create the chassis and return its fields for display."""
        self.log.debug("take_action(%s)", parsed_args)

        baremetal_client = self.app.client_manager.baremetal

        # Only forward options the user actually supplied.
        field_list = ['description', 'extra', 'uuid']
        fields = dict((k, v) for (k, v) in vars(parsed_args).items()
                      if k in field_list and not (v is None))
        # --extra is collected as key=value strings; convert to a dict.
        fields = utils.args_array_to_dict(fields, 'extra')
        chassis = baremetal_client.chassis.create(**fields)._info

        # Drop fields that are not useful in tabular output.
        chassis.pop('links', None)
        chassis.pop('nodes', None)

        return self.dict2columns(chassis)
|
||||
|
||||
|
||||
class DeleteBaremetalChassis(command.Command):
    """Delete a chassis."""

    log = logging.getLogger(__name__ + ".DeleteBaremetalChassis")

    def get_parser(self, prog_name):
        """Build the argument parser for 'baremetal chassis delete'."""
        parser = super(DeleteBaremetalChassis, self).get_parser(prog_name)
        parser.add_argument(
            "chassis",
            metavar="<chassis>",
            nargs="+",
            help=_("UUIDs of chassis to delete")
        )

        return parser

    def take_action(self, parsed_args):
        """Delete each requested chassis; report all failures at the end."""
        self.log.debug("take_action(%s)", parsed_args)

        baremetal_client = self.app.client_manager.baremetal

        # Attempt every deletion before failing, so one bad UUID does not
        # abort the rest of the batch.
        failures = []
        for chassis in parsed_args.chassis:
            try:
                baremetal_client.chassis.delete(chassis)
                print(_('Deleted chassis %s') % chassis)
            except exc.ClientException as e:
                failures.append(_("Failed to delete chassis %(chassis)s: "
                                  "%(error)s")
                                % {'chassis': chassis, 'error': e})

        if failures:
            raise exc.ClientException("\n".join(failures))
|
||||
|
||||
|
||||
class ListBaremetalChassis(command.Lister):
    """List the chassis."""

    log = logging.getLogger(__name__ + ".ListBaremetalChassis")

    def get_parser(self, prog_name):
        """Build the argument parser for 'baremetal chassis list'."""
        parser = super(ListBaremetalChassis, self).get_parser(prog_name)
        # --fields and --long are mutually exclusive display options.
        display_group = parser.add_mutually_exclusive_group(required=False)
        display_group.add_argument(
            '--fields',
            nargs='+',
            dest='fields',
            metavar='<field>',
            action='append',
            default=[],
            choices=res_fields.CHASSIS_DETAILED_RESOURCE.fields,
            help=_("One or more chassis fields. Only these fields will be "
                   "fetched from the server. Cannot be used when '--long' is "
                   "specified.")
        )
        parser.add_argument(
            '--limit',
            metavar='<limit>',
            type=int,
            help=_('Maximum number of chassis to return per request, '
                   '0 for no limit. Default is the maximum number used '
                   'by the Baremetal API Service.')
        )
        display_group.add_argument(
            '--long',
            default=False,
            action='store_true',
            help=_("Show detailed information about the chassis")
        )
        parser.add_argument(
            '--marker',
            metavar='<chassis>',
            help=_('Chassis UUID (for example, of the last chassis in the '
                   'list from a previous request). Returns the list of '
                   'chassis after this UUID.')
        )
        parser.add_argument(
            '--sort',
            metavar="<key>[:<direction>]",
            help=_('Sort output by specified chassis fields and directions '
                   '(asc or desc) (default: asc). Multiple fields and '
                   'directions can be specified, separated by comma.')
        )
        return parser

    def take_action(self, parsed_args):
        """List chassis, honoring paging, field selection and sorting."""
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.baremetal

        columns = res_fields.CHASSIS_RESOURCE.fields
        labels = res_fields.CHASSIS_RESOURCE.labels

        params = {}
        if parsed_args.limit is not None and parsed_args.limit < 0:
            raise exc.CommandError(
                _('Expected non-negative --limit, got %s') %
                parsed_args.limit)
        params['limit'] = parsed_args.limit
        params['marker'] = parsed_args.marker
        if parsed_args.long:
            # --long: fetch and display the detailed resource fields.
            params['detail'] = parsed_args.long
            columns = res_fields.CHASSIS_DETAILED_RESOURCE.fields
            labels = res_fields.CHASSIS_DETAILED_RESOURCE.labels
        elif parsed_args.fields:
            # --fields: flatten the list-of-lists argparse produces and
            # restrict both the request and the displayed columns.
            params['detail'] = False
            fields = itertools.chain.from_iterable(parsed_args.fields)
            resource = res_fields.Resource(list(fields))
            columns = resource.fields
            labels = resource.labels
            params['fields'] = columns

        self.log.debug("params(%s)", params)
        data = client.chassis.list(**params)

        # Client-side sort by the user-specified key(s)/direction(s).
        data = oscutils.sort_items(data, parsed_args.sort)

        return (labels,
                (oscutils.get_item_properties(s, columns, formatters={
                    'Properties': oscutils.format_dict},) for s in data))
|
||||
|
||||
|
||||
class SetBaremetalChassis(command.Command):
    """Set chassis properties."""

    log = logging.getLogger(__name__ + ".SetBaremetalChassis")

    def get_parser(self, prog_name):
        """Build the argument parser for 'baremetal chassis set'."""
        parser = super(SetBaremetalChassis, self).get_parser(prog_name)

        parser.add_argument(
            'chassis',
            metavar='<chassis>',
            help=_("UUID of the chassis")
        )
        parser.add_argument(
            "--description",
            metavar="<description>",
            help=_("Set the description of the chassis")
        )
        parser.add_argument(
            "--extra",
            metavar="<key=value>",
            action='append',
            help=_('Extra to set on this chassis '
                   '(repeat option to set multiple extras)')
        )
        return parser

    def take_action(self, parsed_args):
        """Apply the requested updates as a JSON patch of 'add' ops."""
        self.log.debug("take_action(%s)", parsed_args)

        baremetal_client = self.app.client_manager.baremetal

        properties = []
        if parsed_args.description:
            description = ["description=%s" % parsed_args.description]
            properties.extend(utils.args_array_to_patch(
                'add', description))
        if parsed_args.extra:
            # Prefix each key=value with 'extra/' so the patch targets the
            # chassis 'extra' sub-dictionary.
            properties.extend(utils.args_array_to_patch(
                'add', ['extra/' + x for x in parsed_args.extra]))

        if properties:
            baremetal_client.chassis.update(parsed_args.chassis, properties)
        else:
            self.log.warning("Please specify what to set.")
|
||||
|
||||
|
||||
class ShowBaremetalChassis(command.ShowOne):
    """Show chassis details."""

    log = logging.getLogger(__name__ + ".ShowBaremetalChassis")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(ShowBaremetalChassis, self).get_parser(prog_name)
        parser.add_argument(
            "chassis",
            metavar="<chassis>",
            help=_("UUID of the chassis"))
        parser.add_argument(
            '--fields',
            nargs='+',
            dest='fields',
            metavar='<field>',
            action='append',
            choices=res_fields.CHASSIS_DETAILED_RESOURCE.fields,
            default=[],
            help=_("One or more chassis fields. Only these fields will be "
                   "fetched from the server."))
        return parser

    def take_action(self, parsed_args):
        """Fetch one chassis and return it as (labels, values) columns."""
        self.log.debug("take_action(%s)", parsed_args)

        client = self.app.client_manager.baremetal
        # --fields can repeat and each use takes several values, so the
        # parsed value is a list of lists; flatten it. An empty selection
        # becomes None, meaning "fetch all fields".
        requested = [f for group in parsed_args.fields for f in group]
        chassis = client.chassis.get(parsed_args.chassis,
                                     fields=requested or None)._info
        # Hypermedia links and the node collection are not display fields.
        for hidden in ('links', 'nodes'):
            chassis.pop(hidden, None)

        return zip(*sorted(chassis.items()))
|
||||
|
||||
|
||||
class UnsetBaremetalChassis(command.Command):
    """Unset chassis properties."""
    log = logging.getLogger(__name__ + ".UnsetBaremetalChassis")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(UnsetBaremetalChassis, self).get_parser(prog_name)

        parser.add_argument(
            'chassis',
            metavar='<chassis>',
            help=_("UUID of the chassis"))
        parser.add_argument(
            '--description',
            action='store_true',
            default=False,
            help=_('Clear the chassis description'))
        parser.add_argument(
            "--extra",
            metavar="<key>",
            action='append',
            help=_('Extra to unset on this chassis '
                   '(repeat option to unset multiple extras)'))
        return parser

    def take_action(self, parsed_args):
        """Remove the requested attributes from the chassis."""
        self.log.debug("take_action(%s)", parsed_args)

        client = self.app.client_manager.baremetal

        # Build a JSON-patch of 'remove' operations from the flags given.
        patch = []
        if parsed_args.description:
            patch += utils.args_array_to_patch('remove', ['description'])
        if parsed_args.extra:
            patch += utils.args_array_to_patch(
                'remove', ['extra/' + key for key in parsed_args.extra])

        if not patch:
            self.log.warning("Please specify what to unset.")
        else:
            client.chassis.update(parsed_args.chassis, patch)
|
@ -1,78 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
from ironicclient.common.i18n import _
|
||||
from ironicclient import exc
|
||||
from ironicclient.osc.v1 import baremetal_node
|
||||
from ironicclient.v1 import create_resources
|
||||
|
||||
|
||||
class CreateBaremetal(baremetal_node.CreateBaremetalNode):
    """Create resources from files or Register a new node (DEPRECATED).

    Create resources from files (by only specifying the files) or register
    a new node by specifying one or more optional arguments (DEPRECATED,
    use 'openstack baremetal node create' instead).
    """

    log = logging.getLogger(__name__ + ".CreateBaremetal")

    def get_description(self):
        """Return the one-line description shown in CLI help output."""
        return _("Create resources from files (by only specifying the files) "
                 "or register a new node by specifying one or more optional "
                 "arguments (DEPRECATED, use 'openstack baremetal node "
                 "create' instead)")

    # TODO(vdrok): Remove support for new node creation after 11-July-2017
    # during the 'Queens' cycle.
    def get_parser(self, prog_name):
        """Build the argument parser for this command.

        The parent parser (node creation) requires --driver; this command
        must also accept a file-only invocation, so the parent parser is
        re-wrapped with conflict_handler='resolve' and --driver is
        re-declared as optional.
        """
        parser = super(CreateBaremetal, self).get_parser(prog_name)
        # NOTE(vdrok): It is a workaround to allow --driver to be optional for
        # openstack create command while creation of nodes via this command is
        # not removed completely
        parser = argparse.ArgumentParser(parents=[parser],
                                         conflict_handler='resolve',
                                         description=self.__doc__)
        parser.add_argument(
            '--driver',
            metavar='<driver>',
            help=_('Specify this and any other optional arguments if you want '
                   'to create a node only. Note that this is deprecated; '
                   'please use "openstack baremetal node create" instead.'))
        parser.add_argument(
            "resource_files", metavar="<file>", default=[], nargs="*",
            help=_("File (.yaml or .json) containing descriptions of the "
                   "resources to create. Can be specified multiple times. If "
                   "you want to create resources, only specify the files. Do "
                   "not specify any of the optional arguments."))
        return parser

    def take_action(self, parsed_args):
        """Dispatch to node creation (deprecated path) or file-based creation.

        :raises: exc.ValidationError if neither --driver nor any resource
            file was supplied.
        """
        # --driver selects the deprecated single-node creation path, which
        # is handled entirely by the parent class.
        if parsed_args.driver:
            self.log.warning("This command is deprecated. Instead, use "
                             "'openstack baremetal node create'.")
            return super(CreateBaremetal, self).take_action(parsed_args)
        if not parsed_args.resource_files:
            raise exc.ValidationError(_(
                "If --driver is not supplied to openstack create command, "
                "it is considered that it will create ironic resources from "
                "one or more .json or .yaml files, but no files provided."))
        create_resources.create_resources(self.app.client_manager.baremetal,
                                          parsed_args.resource_files)
        # NOTE(vdrok): CreateBaremetal is still inherited from ShowOne class,
        # which requires the return value of the function to be of certain
        # type, leave this workaround until creation of nodes is removed and
        # then change it so that this inherits from command.Command
        return tuple(), tuple()
|
@ -1,228 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as oscutils
|
||||
|
||||
from ironicclient.common.i18n import _
|
||||
from ironicclient.common import utils
|
||||
from ironicclient.v1 import resource_fields as res_fields
|
||||
from ironicclient.v1 import utils as v1_utils
|
||||
|
||||
|
||||
class ListBaremetalDriver(command.Lister):
    """List the enabled drivers."""

    log = logging.getLogger(__name__ + ".ListBaremetalDriver")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(ListBaremetalDriver, self).get_parser(prog_name)
        parser.add_argument(
            '--type',
            metavar='<type>',
            choices=["classic", "dynamic"],
            # Wrapped in _() for translation, consistent with every other
            # help string in this module.
            help=_('Type of driver ("classic" or "dynamic"). '
                   'The default is to list all of them.')
        )
        parser.add_argument(
            '--long',
            action='store_true',
            default=None,
            help=_("Show detailed information about the drivers."))
        return parser

    def take_action(self, parsed_args):
        """List enabled drivers, sorted by name.

        :returns: a (labels, rows) pair as expected by cliff's Lister.
        """
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.baremetal

        params = {'driver_type': parsed_args.type,
                  'detail': parsed_args.long}
        # --long switches to the detailed column set.
        if parsed_args.long:
            labels = res_fields.DRIVER_DETAILED_RESOURCE.labels
            columns = res_fields.DRIVER_DETAILED_RESOURCE.fields
        else:
            labels = res_fields.DRIVER_RESOURCE.labels
            columns = res_fields.DRIVER_RESOURCE.fields

        drivers = client.driver.list(**params)
        drivers = oscutils.sort_items(drivers, 'name')

        # For list-type properties, show the values as comma separated
        # strings. It's easier to read.
        data = [utils.convert_list_props_to_comma_separated(d._info)
                for d in drivers]

        return (labels,
                (oscutils.get_dict_properties(s, columns) for s in data))
|
||||
|
||||
|
||||
class ListBaremetalDriverProperty(command.Lister):
    """List the driver properties."""

    log = logging.getLogger(__name__ + ".ListBaremetalDriverProperty")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(ListBaremetalDriverProperty, self).get_parser(prog_name)
        parser.add_argument(
            'driver',
            metavar='<driver>',
            # Wrapped in _() for translation, consistent with the rest of
            # this module's help strings.
            help=_('Name of the driver.'))
        return parser

    def take_action(self, parsed_args):
        """Fetch the driver's properties and return (labels, rows)."""
        self.log.debug("take_action(%s)", parsed_args)
        baremetal_client = self.app.client_manager.baremetal

        driver_properties = baremetal_client.driver.properties(
            parsed_args.driver)
        labels = ['Property', 'Description']
        # Sort for a stable, readable table.
        return labels, sorted(driver_properties.items())
|
||||
|
||||
|
||||
class ListBaremetalDriverRaidProperty(command.Lister):
    """List a driver's RAID logical disk properties."""

    log = logging.getLogger(__name__ + ".ListBaremetalDriverRaidProperty")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(ListBaremetalDriverRaidProperty, self).get_parser(
            prog_name)
        parser.add_argument(
            'driver',
            metavar='<driver>',
            # Wrapped in _() for translation, consistent with the rest of
            # this module's help strings.
            help=_('Name of the driver.'))
        return parser

    def take_action(self, parsed_args):
        """Fetch the driver's RAID logical-disk properties as (labels, rows)."""
        self.log.debug("take_action(%s)", parsed_args)
        baremetal_client = self.app.client_manager.baremetal

        raid_props = baremetal_client.driver.raid_logical_disk_properties(
            parsed_args.driver)
        labels = ['Property', 'Description']
        # Sort for a stable, readable table.
        return labels, sorted(raid_props.items())
|
||||
|
||||
|
||||
class PassthruCallBaremetalDriver(command.ShowOne):
    """Call a vendor passthru method for a driver."""

    log = logging.getLogger(__name__ + ".PassthruCallBaremetalDriver")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(PassthruCallBaremetalDriver, self).get_parser(prog_name)
        parser.add_argument(
            'driver',
            metavar='<driver>',
            help=_('Name of the driver.')
        )
        parser.add_argument(
            'method',
            metavar='<method>',
            help=_("Vendor passthru method to be called.")
        )
        parser.add_argument(
            '--arg',
            metavar='<key=value>',
            action='append',
            help=_("Argument to pass to the passthru method (repeat option "
                   "to specify multiple arguments).")
        )
        parser.add_argument(
            '--http-method',
            dest='http_method',
            metavar='<http-method>',
            choices=v1_utils.HTTP_METHODS,
            default='POST',
            # The list of valid verbs is interpolated into the translated
            # help string so it always matches v1_utils.HTTP_METHODS.
            help=_("The HTTP method to use in the passthru request. One of "
                   "%s. Defaults to 'POST'.") %
            oscutils.format_list(v1_utils.HTTP_METHODS)
        )
        return parser

    def take_action(self, parsed_args):
        """Invoke the vendor passthru method and show its response."""
        self.log.debug("take_action(%s)", parsed_args)
        baremetal_client = self.app.client_manager.baremetal

        # Repeated --arg key=value options are folded into one dict payload.
        arguments = utils.key_value_pairs_to_dict(parsed_args.arg)
        response = (baremetal_client.driver.
                    vendor_passthru(parsed_args.driver,
                                    parsed_args.method,
                                    http_method=parsed_args.http_method,
                                    args=arguments))

        return self.dict2columns(response)
|
||||
|
||||
|
||||
class PassthruListBaremetalDriver(command.Lister):
    """List available vendor passthru methods for a driver."""

    log = logging.getLogger(__name__ + ".PassthruListBaremetalDriver")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(PassthruListBaremetalDriver, self).get_parser(prog_name)
        parser.add_argument(
            'driver',
            metavar='<driver>',
            help=_('Name of the driver.'))
        return parser

    def take_action(self, parsed_args):
        """Fetch and tabulate the driver's vendor passthru methods."""
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.baremetal

        columns = res_fields.VENDOR_PASSTHRU_METHOD_RESOURCE.fields
        labels = res_fields.VENDOR_PASSTHRU_METHOD_RESOURCE.labels

        methods = client.driver.get_vendor_passthru_methods(
            parsed_args.driver)

        rows = []
        for name, info in methods.items():
            # Inject the method name into its own row and flatten the HTTP
            # verb list into a readable comma-separated string.
            info['name'] = name
            info['http_methods'] = ', '.join(info['http_methods'])
            rows.append(info)

        return (labels,
                (oscutils.get_dict_properties(row, columns) for row in rows))
|
||||
|
||||
|
||||
class ShowBaremetalDriver(command.ShowOne):
    """Show information about a driver."""

    log = logging.getLogger(__name__ + ".ShowBaremetalDriver")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(ShowBaremetalDriver, self).get_parser(prog_name)
        parser.add_argument(
            'driver',
            metavar='<driver>',
            help=_('Name of the driver.'))
        return parser

    def take_action(self, parsed_args):
        """Fetch one driver and return it as (labels, values) columns."""
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.baremetal

        info = client.driver.get(parsed_args.driver)._info
        # Hypermedia links and the properties blob are not display fields.
        for key in ("links", "properties"):
            info.pop(key, None)
        # For list-type properties, show the values as comma separated
        # strings. It's easier to read.
        info = utils.convert_list_props_to_comma_separated(info)
        return zip(*sorted(info.items()))
|
File diff suppressed because it is too large
Load Diff
@ -1,496 +0,0 @@
|
||||
#
|
||||
# Copyright 2015 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import itertools
|
||||
import logging
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as oscutils
|
||||
|
||||
from ironicclient.common.i18n import _
|
||||
from ironicclient.common import utils
|
||||
from ironicclient import exc
|
||||
from ironicclient.v1 import resource_fields as res_fields
|
||||
|
||||
|
||||
class CreateBaremetalPort(command.ShowOne):
    """Create a new port"""

    log = logging.getLogger(__name__ + ".CreateBaremetalPort")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(CreateBaremetalPort, self).get_parser(prog_name)

        parser.add_argument(
            'address',
            metavar='<address>',
            help=_('MAC address for this port.')
        )
        parser.add_argument(
            '--node',
            dest='node_uuid',
            metavar='<uuid>',
            required=True,
            help=_('UUID of the node that this port belongs to.')
        )
        parser.add_argument(
            '--uuid',
            dest='uuid',
            metavar='<uuid>',
            help=_('UUID of the port.'))
        parser.add_argument(
            '--extra',
            metavar="<key=value>",
            action='append',
            help=_("Record arbitrary key/value metadata. "
                   "Can be specified multiple times.")
        )
        parser.add_argument(
            '--local-link-connection',
            metavar="<key=value>",
            action='append',
            help=_("Key/value metadata describing Local link connection "
                   "information. Valid keys are 'switch_info', 'switch_id', "
                   "and 'port_id'. The keys 'switch_id' and 'port_id' are "
                   "required. Can be specified multiple times.")
        )
        # Deprecated short alias for --local-link-connection; kept for
        # backward compatibility and merged with it in take_action().
        parser.add_argument(
            '-l',
            dest='local_link_connection_deprecated',
            metavar="<key=value>",
            action='append',
            help=_("DEPRECATED. Please use --local-link-connection instead. "
                   "Key/value metadata describing Local link connection "
                   "information. Valid keys are 'switch_info', 'switch_id', "
                   "and 'port_id'. The keys 'switch_id' and 'port_id' are "
                   "required. Can be specified multiple times.")
        )
        parser.add_argument(
            '--pxe-enabled',
            metavar='<boolean>',
            help=_('Indicates whether this Port should be used when '
                   'PXE booting this Node.')
        )

        parser.add_argument(
            '--port-group',
            dest='portgroup_uuid',
            metavar='<uuid>',
            help=_("UUID of the port group that this port belongs to."))

        parser.add_argument(
            '--physical-network',
            dest='physical_network',
            metavar='<physical network>',
            help=_("Name of the physical network to which this port is "
                   "connected."))

        return parser

    def take_action(self, parsed_args):
        """Create the port and show the resulting resource."""
        self.log.debug("take_action(%s)", parsed_args)
        baremetal_client = self.app.client_manager.baremetal

        # Merge values given via the deprecated -l flag into the canonical
        # --local-link-connection list before building the request.
        if parsed_args.local_link_connection_deprecated:
            self.log.warning("Please use --local-link-connection instead "
                             "of -l, as it is deprecated and will be "
                             "removed in future releases.")
            # It is parsed to either None, or to an array
            if parsed_args.local_link_connection:
                parsed_args.local_link_connection.extend(
                    parsed_args.local_link_connection_deprecated)
            else:
                parsed_args.local_link_connection = (
                    parsed_args.local_link_connection_deprecated)

        # Only attributes the API accepts, and only those actually supplied,
        # are forwarded to the create call.
        field_list = ['address', 'uuid', 'extra', 'node_uuid', 'pxe_enabled',
                      'local_link_connection', 'portgroup_uuid',
                      'physical_network']
        fields = dict((k, v) for (k, v) in vars(parsed_args).items()
                      if k in field_list and v is not None)
        # Convert repeated key=value options into nested dicts.
        fields = utils.args_array_to_dict(fields, 'extra')
        fields = utils.args_array_to_dict(fields, 'local_link_connection')
        port = baremetal_client.port.create(**fields)

        data = dict([(f, getattr(port, f, '')) for f in
                     res_fields.PORT_DETAILED_RESOURCE.fields])

        return self.dict2columns(data)
|
||||
|
||||
|
||||
class ShowBaremetalPort(command.ShowOne):
    """Show baremetal port details."""

    log = logging.getLogger(__name__ + ".ShowBaremetalPort")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(ShowBaremetalPort, self).get_parser(prog_name)
        parser.add_argument(
            "port",
            metavar="<id>",
            help=_("UUID of the port (or MAC address if --address is "
                   "specified)."))
        parser.add_argument(
            '--address',
            dest='address',
            action='store_true',
            default=False,
            help=_('<id> is the MAC address (instead of the UUID) of the '
                   'port.'))
        parser.add_argument(
            '--fields',
            nargs='+',
            dest='fields',
            metavar='<field>',
            action='append',
            choices=res_fields.PORT_DETAILED_RESOURCE.fields,
            default=[],
            help=_("One or more port fields. Only these fields will be "
                   "fetched from the server."))
        return parser

    def take_action(self, parsed_args):
        """Fetch one port and return it as (labels, values) columns."""
        self.log.debug("take_action(%s)", parsed_args)

        client = self.app.client_manager.baremetal
        # --fields may repeat, each use taking several values; flatten the
        # nested lists. An empty selection means "fetch everything".
        requested = [f for group in parsed_args.fields for f in group]
        requested = requested or None

        # With --address, <id> is interpreted as a MAC address instead of
        # a UUID; pick the matching client lookup.
        lookup = (client.port.get_by_address if parsed_args.address
                  else client.port.get)
        port = lookup(parsed_args.port, fields=requested)._info

        # Hypermedia links are not a display field.
        port.pop("links", None)
        return zip(*sorted(port.items()))
|
||||
|
||||
|
||||
class UnsetBaremetalPort(command.Command):
    """Unset baremetal port properties."""
    log = logging.getLogger(__name__ + ".UnsetBaremetalPort")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(UnsetBaremetalPort, self).get_parser(prog_name)

        parser.add_argument(
            'port',
            metavar='<port>',
            help=_("UUID of the port."))
        parser.add_argument(
            "--extra",
            metavar="<key>",
            action='append',
            help=_('Extra to unset on this baremetal port '
                   '(repeat option to unset multiple extras)'))
        parser.add_argument(
            '--port-group',
            action='store_true',
            dest='portgroup',
            help=_("Remove port from the port group"))
        parser.add_argument(
            '--physical-network',
            action='store_true',
            dest='physical_network',
            help=_("Unset the physical network on this baremetal port."))

        return parser

    def take_action(self, parsed_args):
        """Remove the requested attributes from the port."""
        self.log.debug("take_action(%s)", parsed_args)

        client = self.app.client_manager.baremetal

        # Build a JSON-patch of 'remove' operations from the flags given.
        patch = []
        if parsed_args.extra:
            patch += utils.args_array_to_patch(
                'remove', ['extra/' + key for key in parsed_args.extra])
        if parsed_args.portgroup:
            patch += utils.args_array_to_patch('remove', ['portgroup_uuid'])
        if parsed_args.physical_network:
            patch += utils.args_array_to_patch('remove', ['physical_network'])

        if not patch:
            self.log.warning("Please specify what to unset.")
        else:
            client.port.update(parsed_args.port, patch)
|
||||
|
||||
|
||||
class SetBaremetalPort(command.Command):
    """Set baremetal port properties."""

    log = logging.getLogger(__name__ + ".SetBaremetalPort")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(SetBaremetalPort, self).get_parser(prog_name)

        parser.add_argument(
            'port',
            metavar='<port>',
            help=_("UUID of the port")
        )
        parser.add_argument(
            '--node',
            dest='node_uuid',
            metavar='<uuid>',
            help=_('Set UUID of the node that this port belongs to')
        )
        parser.add_argument(
            "--address",
            metavar="<address>",
            dest='address',
            help=_("Set MAC address for this port")
        )
        parser.add_argument(
            "--extra",
            metavar="<key=value>",
            action='append',
            help=_('Extra to set on this baremetal port '
                   '(repeat option to set multiple extras)')
        )
        parser.add_argument(
            "--port-group",
            metavar="<uuid>",
            dest='portgroup_uuid',
            help=_('Set UUID of the port group that this port belongs to.'))
        parser.add_argument(
            "--local-link-connection",
            metavar="<key=value>",
            action='append',
            help=_("Key/value metadata describing local link connection "
                   "information. Valid keys are 'switch_info', 'switch_id', "
                   "and 'port_id'. The keys 'switch_id' and 'port_id' are "
                   "required. Can be specified multiple times.")
        )
        # --pxe-enabled / --pxe-disabled both write parsed_args.pxe_enabled
        # (True / False); mutual exclusion prevents conflicting flags, and
        # the None default means "leave unchanged".
        pxe_enabled_group = parser.add_mutually_exclusive_group(required=False)
        pxe_enabled_group.add_argument(
            "--pxe-enabled",
            dest='pxe_enabled',
            default=None,
            action='store_true',
            help=_("Indicates that this port should be used when "
                   "PXE booting this node (default)")
        )
        pxe_enabled_group.add_argument(
            "--pxe-disabled",
            dest='pxe_enabled',
            default=None,
            action='store_false',
            help=_("Indicates that this port should not be used when "
                   "PXE booting this node")
        )
        parser.add_argument(
            '--physical-network',
            metavar='<physical network>',
            dest='physical_network',
            help=_("Set the name of the physical network to which this port "
                   "is connected."))

        return parser

    def take_action(self, parsed_args):
        """Apply the requested attribute changes to the port."""
        self.log.debug("take_action(%s)", parsed_args)

        baremetal_client = self.app.client_manager.baremetal

        # Accumulate a JSON-patch list; each supplied option contributes
        # one or more 'add' (set/replace) operations.
        properties = []
        if parsed_args.node_uuid:
            node_uuid = ["node_uuid=%s" % parsed_args.node_uuid]
            properties.extend(utils.args_array_to_patch(
                'add', node_uuid))
        if parsed_args.address:
            address = ["address=%s" % parsed_args.address]
            properties.extend(utils.args_array_to_patch('add', address))
        if parsed_args.extra:
            properties.extend(utils.args_array_to_patch(
                'add', ['extra/' + x for x in parsed_args.extra]))
        if parsed_args.portgroup_uuid:
            portgroup_uuid = ["portgroup_uuid=%s" % parsed_args.portgroup_uuid]
            properties.extend(utils.args_array_to_patch('add', portgroup_uuid))
        if parsed_args.local_link_connection:
            properties.extend(utils.args_array_to_patch(
                'add', ['local_link_connection/' + x for x in
                        parsed_args.local_link_connection]))
        # pxe_enabled is a tri-state (None = not requested), so compare
        # against None rather than relying on truthiness.
        if parsed_args.pxe_enabled is not None:
            properties.extend(utils.args_array_to_patch(
                'add', ['pxe_enabled=%s' % parsed_args.pxe_enabled]))
        if parsed_args.physical_network:
            physical_network = ["physical_network=%s" %
                                parsed_args.physical_network]
            properties.extend(utils.args_array_to_patch('add',
                                                        physical_network))

        if properties:
            baremetal_client.port.update(parsed_args.port, properties)
        else:
            self.log.warning("Please specify what to set.")
|
||||
|
||||
|
||||
class DeleteBaremetalPort(command.Command):
    """Delete port(s)."""

    log = logging.getLogger(__name__ + ".DeleteBaremetalPort")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(DeleteBaremetalPort, self).get_parser(prog_name)
        parser.add_argument(
            "ports",
            metavar="<port>",
            nargs="+",
            help=_("UUID(s) of the port(s) to delete."))

        return parser

    def take_action(self, parsed_args):
        """Delete each requested port, collecting per-port failures.

        All ports are attempted even when some fail; accumulated failures
        are raised as a single ClientException at the end.
        """
        self.log.debug("take_action(%s)", parsed_args)

        client = self.app.client_manager.baremetal

        errors = []
        for uuid in parsed_args.ports:
            try:
                client.port.delete(uuid)
            except exc.ClientException as e:
                errors.append(_("Failed to delete port %(port)s: %(error)s")
                              % {'port': uuid, 'error': e})
            else:
                print(_('Deleted port %s') % uuid)

        if errors:
            raise exc.ClientException("\n".join(errors))
|
||||
|
||||
|
||||
class ListBaremetalPort(command.Lister):
    """List baremetal ports."""

    log = logging.getLogger(__name__ + ".ListBaremetalPort")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(ListBaremetalPort, self).get_parser(prog_name)
        parser.add_argument(
            '--address',
            dest='address',
            metavar='<mac-address>',
            help=_("Only show information for the port with this MAC address.")
        )
        parser.add_argument(
            '--node',
            dest='node',
            metavar='<node>',
            help=_("Only list ports of this node (name or UUID).")
        )
        parser.add_argument(
            "--port-group",
            metavar="<port group>",
            dest='portgroup',
            help=_('Only list ports of this port group (name or UUID).'))
        parser.add_argument(
            '--limit',
            metavar='<limit>',
            type=int,
            help=_('Maximum number of ports to return per request, '
                   '0 for no limit. Default is the maximum number used '
                   'by the Ironic API Service.')
        )
        parser.add_argument(
            '--marker',
            metavar='<port>',
            help=_('Port UUID (for example, of the last port in the list '
                   'from a previous request). Returns the list of ports '
                   'after this UUID.')
        )
        parser.add_argument(
            '--sort',
            metavar="<key>[:<direction>]",
            help=_('Sort output by specified port fields and directions '
                   '(asc or desc) (default: asc). Multiple fields and '
                   'directions can be specified, separated by comma.')
        )
        # --long and --fields are mutually exclusive: --long implies the
        # full detailed column set, --fields requests a custom subset.
        display_group = parser.add_mutually_exclusive_group()
        display_group.add_argument(
            '--long',
            dest='detail',
            action='store_true',
            default=False,
            help=_("Show detailed information about ports.")
        )
        display_group.add_argument(
            '--fields',
            nargs='+',
            dest='fields',
            metavar='<field>',
            action='append',
            default=[],
            choices=res_fields.PORT_DETAILED_RESOURCE.fields,
            help=_("One or more port fields. Only these fields will be "
                   "fetched from the server. Can not be used when "
                   "'--long' is specified.")
        )
        return parser

    def take_action(self, parsed_args):
        """List ports matching the filters as a (labels, rows) pair.

        :raises: exc.CommandError if --limit is negative.
        """
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.baremetal

        # Default (brief) column set; overridden below by --long/--fields.
        columns = res_fields.PORT_RESOURCE.fields
        labels = res_fields.PORT_RESOURCE.labels

        params = {}
        if parsed_args.limit is not None and parsed_args.limit < 0:
            raise exc.CommandError(
                _('Expected non-negative --limit, got %s') %
                parsed_args.limit)
        params['limit'] = parsed_args.limit
        params['marker'] = parsed_args.marker

        if parsed_args.address is not None:
            params['address'] = parsed_args.address
        if parsed_args.node is not None:
            params['node'] = parsed_args.node
        if parsed_args.portgroup is not None:
            params['portgroup'] = parsed_args.portgroup

        if parsed_args.detail:
            params['detail'] = parsed_args.detail
            columns = res_fields.PORT_DETAILED_RESOURCE.fields
            labels = res_fields.PORT_DETAILED_RESOURCE.labels

        elif parsed_args.fields:
            params['detail'] = False
            # --fields may repeat and each use takes several values, so the
            # parsed value is a list of lists; flatten before building the
            # custom Resource.
            fields = itertools.chain.from_iterable(parsed_args.fields)
            resource = res_fields.Resource(list(fields))
            columns = resource.fields
            labels = resource.labels
            params['fields'] = columns

        self.log.debug("params(%s)", params)
        data = client.port.list(**params)

        data = oscutils.sort_items(data, parsed_args.sort)

        return (labels,
                (oscutils.get_item_properties(s, columns, formatters={
                    'extra': oscutils.format_dict},) for s in data))
|
@ -1,461 +0,0 @@
|
||||
#
|
||||
# Copyright 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import itertools
|
||||
import logging
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as oscutils
|
||||
|
||||
from ironicclient.common.i18n import _
|
||||
from ironicclient.common import utils
|
||||
from ironicclient import exc
|
||||
from ironicclient.v1 import resource_fields as res_fields
|
||||
|
||||
|
||||
class CreateBaremetalPortGroup(command.ShowOne):
|
||||
"""Create a new baremetal port group."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".CreateBaremetalPortGroup")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(CreateBaremetalPortGroup, self).get_parser(prog_name)
|
||||
|
||||
parser.add_argument(
|
||||
'--node',
|
||||
dest='node_uuid',
|
||||
metavar='<uuid>',
|
||||
required=True,
|
||||
help=_('UUID of the node that this port group belongs to.'))
|
||||
parser.add_argument(
|
||||
'--address',
|
||||
metavar='<mac-address>',
|
||||
help=_('MAC address for this port group.'))
|
||||
parser.add_argument(
|
||||
'--name',
|
||||
dest='name',
|
||||
help=_('Name of the port group.'))
|
||||
parser.add_argument(
|
||||
'--uuid',
|
||||
dest='uuid',
|
||||
help=_('UUID of the port group.'))
|
||||
parser.add_argument(
|
||||
'--extra',
|
||||
metavar="<key=value>",
|
||||
action='append',
|
||||
help=_("Record arbitrary key/value metadata. "
|
||||
"Can be specified multiple times."))
|
||||
parser.add_argument(
|
||||
'--mode',
|
||||
help=_('Mode of the port group. For possible values, refer to '
|
||||
'https://www.kernel.org/doc/Documentation/networking'
|
||||
'/bonding.txt.'))
|
||||
parser.add_argument(
|
||||
'--property',
|
||||
dest='properties',
|
||||
metavar="<key=value>",
|
||||
action='append',
|
||||
help=_("Key/value property related to this port group's "
|
||||
"configuration. Can be specified multiple times."))
|
||||
standalone_ports_group = parser.add_mutually_exclusive_group()
|
||||
standalone_ports_group.add_argument(
|
||||
'--support-standalone-ports',
|
||||
action='store_true',
|
||||
help=_("Ports that are members of this port group "
|
||||
"can be used as stand-alone ports. (default)"))
|
||||
standalone_ports_group.add_argument(
|
||||
'--unsupport-standalone-ports',
|
||||
action='store_true',
|
||||
help=_("Ports that are members of this port group "
|
||||
"cannot be used as stand-alone ports."))
|
||||
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
baremetal_client = self.app.client_manager.baremetal
|
||||
|
||||
field_list = ['node_uuid', 'address', 'name', 'uuid', 'extra', 'mode',
|
||||
'properties']
|
||||
fields = dict((k, v) for (k, v) in vars(parsed_args).items()
|
||||
if k in field_list and v is not None)
|
||||
if parsed_args.support_standalone_ports:
|
||||
fields['standalone_ports_supported'] = True
|
||||
if parsed_args.unsupport_standalone_ports:
|
||||
fields['standalone_ports_supported'] = False
|
||||
|
||||
fields = utils.args_array_to_dict(fields, 'extra')
|
||||
fields = utils.args_array_to_dict(fields, 'properties')
|
||||
portgroup = baremetal_client.portgroup.create(**fields)
|
||||
|
||||
data = dict([(f, getattr(portgroup, f, '')) for f in
|
||||
res_fields.PORTGROUP_DETAILED_RESOURCE.fields])
|
||||
|
||||
return self.dict2columns(data)
|
||||
|
||||
|
||||
class ShowBaremetalPortGroup(command.ShowOne):
|
||||
"""Show baremetal port group details."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".ShowBaremetalPortGroup")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(ShowBaremetalPortGroup, self).get_parser(prog_name)
|
||||
parser.add_argument(
|
||||
"portgroup",
|
||||
metavar="<id>",
|
||||
help=_("UUID or name of the port group "
|
||||
"(or MAC address if --address is specified)."))
|
||||
parser.add_argument(
|
||||
'--address',
|
||||
dest='address',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help=_('<id> is the MAC address (instead of UUID or name) '
|
||||
'of the port group.'))
|
||||
parser.add_argument(
|
||||
'--fields',
|
||||
nargs='+',
|
||||
dest='fields',
|
||||
metavar='<field>',
|
||||
action='append',
|
||||
choices=res_fields.PORTGROUP_DETAILED_RESOURCE.fields,
|
||||
default=[],
|
||||
help=_("One or more port group fields. Only these fields will be "
|
||||
"fetched from the server."))
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
baremetal_client = self.app.client_manager.baremetal
|
||||
fields = list(itertools.chain.from_iterable(parsed_args.fields))
|
||||
fields = fields if fields else None
|
||||
|
||||
if parsed_args.address:
|
||||
portgroup = baremetal_client.portgroup.get_by_address(
|
||||
parsed_args.portgroup, fields=fields)._info
|
||||
else:
|
||||
portgroup = baremetal_client.portgroup.get(
|
||||
parsed_args.portgroup, fields=fields)._info
|
||||
|
||||
portgroup.pop("links", None)
|
||||
portgroup.pop("ports", None)
|
||||
return zip(*sorted(portgroup.items()))
|
||||
|
||||
|
||||
class ListBaremetalPortGroup(command.Lister):
|
||||
"""List baremetal port groups."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".ListBaremetalPortGroup")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(ListBaremetalPortGroup, self).get_parser(prog_name)
|
||||
parser.add_argument(
|
||||
'--limit',
|
||||
metavar='<limit>',
|
||||
type=int,
|
||||
help=_('Maximum number of port groups to return per request, '
|
||||
'0 for no limit. Default is the maximum number used '
|
||||
'by the Baremetal API Service.'))
|
||||
parser.add_argument(
|
||||
'--marker',
|
||||
metavar='<port group>',
|
||||
help=_('Port group UUID (for example, of the last port group in '
|
||||
'the list from a previous request). Returns the list of '
|
||||
'port groups after this UUID.'))
|
||||
parser.add_argument(
|
||||
'--sort',
|
||||
metavar="<key>[:<direction>]",
|
||||
help=_('Sort output by specified port group fields and directions '
|
||||
'(asc or desc) (default: asc). Multiple fields and '
|
||||
'directions can be specified, separated by comma.'))
|
||||
parser.add_argument(
|
||||
'--address',
|
||||
metavar='<mac-address>',
|
||||
help=_("Only show information for the port group with this MAC "
|
||||
"address."))
|
||||
parser.add_argument(
|
||||
'--node',
|
||||
dest='node',
|
||||
metavar='<node>',
|
||||
help=_("Only list port groups of this node (name or UUID)."))
|
||||
|
||||
display_group = parser.add_mutually_exclusive_group(required=False)
|
||||
display_group.add_argument(
|
||||
'--long',
|
||||
default=False,
|
||||
dest='detail',
|
||||
help=_("Show detailed information about the port groups."),
|
||||
action='store_true')
|
||||
display_group.add_argument(
|
||||
'--fields',
|
||||
nargs='+',
|
||||
dest='fields',
|
||||
metavar='<field>',
|
||||
action='append',
|
||||
default=[],
|
||||
choices=res_fields.PORTGROUP_DETAILED_RESOURCE.fields,
|
||||
help=_("One or more port group fields. Only these fields will be "
|
||||
"fetched from the server. Can not be used when '--long' is "
|
||||
"specified."))
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
client = self.app.client_manager.baremetal
|
||||
|
||||
columns = res_fields.PORTGROUP_RESOURCE.fields
|
||||
labels = res_fields.PORTGROUP_RESOURCE.labels
|
||||
|
||||
params = {}
|
||||
if parsed_args.limit is not None and parsed_args.limit < 0:
|
||||
raise exc.CommandError(
|
||||
_('Expected non-negative --limit, got %s') %
|
||||
parsed_args.limit)
|
||||
params['limit'] = parsed_args.limit
|
||||
params['marker'] = parsed_args.marker
|
||||
if parsed_args.address is not None:
|
||||
params['address'] = parsed_args.address
|
||||
if parsed_args.node is not None:
|
||||
params['node'] = parsed_args.node
|
||||
|
||||
if parsed_args.detail:
|
||||
params['detail'] = parsed_args.detail
|
||||
columns = res_fields.PORTGROUP_DETAILED_RESOURCE.fields
|
||||
labels = res_fields.PORTGROUP_DETAILED_RESOURCE.labels
|
||||
elif parsed_args.fields:
|
||||
params['detail'] = False
|
||||
fields = itertools.chain.from_iterable(parsed_args.fields)
|
||||
resource = res_fields.Resource(list(fields))
|
||||
columns = resource.fields
|
||||
labels = resource.labels
|
||||
params['fields'] = columns
|
||||
|
||||
self.log.debug("params(%s)", params)
|
||||
data = client.portgroup.list(**params)
|
||||
|
||||
data = oscutils.sort_items(data, parsed_args.sort)
|
||||
|
||||
return (labels,
|
||||
(oscutils.get_item_properties(s, columns, formatters={
|
||||
'Properties': oscutils.format_dict},) for s in data))
|
||||
|
||||
|
||||
class DeleteBaremetalPortGroup(command.Command):
|
||||
"""Unregister baremetal port group(s)."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".DeleteBaremetalPortGroup")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(DeleteBaremetalPortGroup, self).get_parser(prog_name)
|
||||
parser.add_argument(
|
||||
"portgroups",
|
||||
metavar="<port group>",
|
||||
nargs="+",
|
||||
help=_("Port group(s) to delete (name or UUID)."))
|
||||
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
baremetal_client = self.app.client_manager.baremetal
|
||||
|
||||
failures = []
|
||||
for portgroup in parsed_args.portgroups:
|
||||
try:
|
||||
baremetal_client.portgroup.delete(portgroup)
|
||||
print(_('Deleted port group %s') % portgroup)
|
||||
except exc.ClientException as e:
|
||||
failures.append(_("Failed to delete port group %(portgroup)s: "
|
||||
" %(error)s")
|
||||
% {'portgroup': portgroup, 'error': e})
|
||||
|
||||
if failures:
|
||||
raise exc.ClientException("\n".join(failures))
|
||||
|
||||
|
||||
class SetBaremetalPortGroup(command.Command):
|
||||
"""Set baremetal port group properties."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".SetBaremetalPortGroup")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(SetBaremetalPortGroup, self).get_parser(prog_name)
|
||||
|
||||
parser.add_argument(
|
||||
'portgroup',
|
||||
metavar='<port group>',
|
||||
help=_("Name or UUID of the port group."),
|
||||
)
|
||||
parser.add_argument(
|
||||
'--node',
|
||||
dest='node_uuid',
|
||||
metavar='<uuid>',
|
||||
help=_('Update UUID of the node that this port group belongs to.')
|
||||
)
|
||||
parser.add_argument(
|
||||
"--address",
|
||||
metavar="<mac-address>",
|
||||
help=_("MAC address for this port group."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--name",
|
||||
metavar="<name>",
|
||||
help=_("Name of the port group."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--extra",
|
||||
metavar="<key=value>",
|
||||
action='append',
|
||||
help=_('Extra to set on this baremetal port group '
|
||||
'(repeat option to set multiple extras).'),
|
||||
)
|
||||
parser.add_argument(
|
||||
'--mode',
|
||||
help=_('Mode of the port group. For possible values, refer to '
|
||||
'https://www.kernel.org/doc/Documentation/networking'
|
||||
'/bonding.txt.'))
|
||||
parser.add_argument(
|
||||
'--property',
|
||||
dest='properties',
|
||||
metavar="<key=value>",
|
||||
action='append',
|
||||
help=_("Key/value property related to this port group's "
|
||||
"configuration (repeat option to set multiple "
|
||||
"properties)."))
|
||||
standalone_ports_group = parser.add_mutually_exclusive_group()
|
||||
standalone_ports_group.add_argument(
|
||||
'--support-standalone-ports',
|
||||
action='store_true',
|
||||
default=None,
|
||||
help=_("Ports that are members of this port group "
|
||||
"can be used as stand-alone ports.")
|
||||
)
|
||||
standalone_ports_group.add_argument(
|
||||
'--unsupport-standalone-ports',
|
||||
action='store_true',
|
||||
help=_("Ports that are members of this port group "
|
||||
"cannot be used as stand-alone ports.")
|
||||
)
|
||||
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
baremetal_client = self.app.client_manager.baremetal
|
||||
|
||||
properties = []
|
||||
if parsed_args.node_uuid:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', ["node_uuid=%s" % parsed_args.node_uuid]))
|
||||
if parsed_args.address:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', ["address=%s" % parsed_args.address]))
|
||||
if parsed_args.name:
|
||||
name = ["name=%s" % parsed_args.name]
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', name))
|
||||
if parsed_args.support_standalone_ports:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', ["standalone_ports_supported=True"]))
|
||||
if parsed_args.unsupport_standalone_ports:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', ["standalone_ports_supported=False"]))
|
||||
if parsed_args.mode:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', ["mode=%s" % parsed_args.mode]))
|
||||
|
||||
if parsed_args.extra:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', ['extra/' + x for x in parsed_args.extra]))
|
||||
if parsed_args.properties:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', ['properties/' + x for x in parsed_args.properties]))
|
||||
|
||||
if properties:
|
||||
baremetal_client.portgroup.update(parsed_args.portgroup,
|
||||
properties)
|
||||
else:
|
||||
self.log.warning("Please specify what to set.")
|
||||
|
||||
|
||||
class UnsetBaremetalPortGroup(command.Command):
|
||||
"""Unset baremetal port group properties."""
|
||||
log = logging.getLogger(__name__ + ".UnsetBaremetalPortGroup")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(UnsetBaremetalPortGroup, self).get_parser(prog_name)
|
||||
|
||||
parser.add_argument(
|
||||
'portgroup',
|
||||
metavar='<port group>',
|
||||
help=_("Name or UUID of the port group.")
|
||||
)
|
||||
parser.add_argument(
|
||||
"--name",
|
||||
action='store_true',
|
||||
help=_("Unset the name of the port group."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--address",
|
||||
action='store_true',
|
||||
help=_("Unset the address of the port group."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--extra",
|
||||
metavar="<key>",
|
||||
action='append',
|
||||
help=_('Extra to unset on this baremetal port group '
|
||||
'(repeat option to unset multiple extras).'),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--property",
|
||||
dest='properties',
|
||||
metavar="<key>",
|
||||
action='append',
|
||||
help=_('Property to unset on this baremetal port group '
|
||||
'(repeat option to unset multiple properties).'),
|
||||
)
|
||||
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
baremetal_client = self.app.client_manager.baremetal
|
||||
|
||||
properties = []
|
||||
if parsed_args.name:
|
||||
properties.extend(utils.args_array_to_patch('remove',
|
||||
['name']))
|
||||
if parsed_args.address:
|
||||
properties.extend(utils.args_array_to_patch('remove',
|
||||
['address']))
|
||||
if parsed_args.extra:
|
||||
properties.extend(utils.args_array_to_patch('remove',
|
||||
['extra/' + x for x in parsed_args.extra]))
|
||||
if parsed_args.properties:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'remove', ['properties/' + x for x in parsed_args.properties]))
|
||||
|
||||
if properties:
|
||||
baremetal_client.portgroup.update(parsed_args.portgroup,
|
||||
properties)
|
||||
else:
|
||||
self.log.warning("Please specify what to unset.")
|
@ -1,362 +0,0 @@
|
||||
# Copyright 2017 FUJITSU LIMITED
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import itertools
|
||||
import logging
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as oscutils
|
||||
|
||||
from ironicclient.common.i18n import _
|
||||
from ironicclient.common import utils
|
||||
from ironicclient import exc
|
||||
from ironicclient.v1 import resource_fields as res_fields
|
||||
|
||||
|
||||
class CreateBaremetalVolumeConnector(command.ShowOne):
|
||||
"""Create a new baremetal volume connector."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".CreateBaremetalVolumeConnector")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = (
|
||||
super(CreateBaremetalVolumeConnector, self).get_parser(prog_name))
|
||||
|
||||
parser.add_argument(
|
||||
'--node',
|
||||
dest='node_uuid',
|
||||
metavar='<uuid>',
|
||||
required=True,
|
||||
help=_('UUID of the node that this volume connector belongs to.'))
|
||||
parser.add_argument(
|
||||
'--type',
|
||||
dest='type',
|
||||
metavar="<type>",
|
||||
required=True,
|
||||
choices=('iqn', 'ip', 'mac', 'wwnn', 'wwpn'),
|
||||
help=_("Type of the volume connector. Can be 'iqn', 'ip', 'mac', "
|
||||
"'wwnn', 'wwpn'."))
|
||||
parser.add_argument(
|
||||
'--connector-id',
|
||||
dest='connector_id',
|
||||
required=True,
|
||||
metavar="<connector id>",
|
||||
help=_("ID of the volume connector in the specified type. For "
|
||||
"example, the iSCSI initiator IQN for the node if the type "
|
||||
"is 'iqn'."))
|
||||
parser.add_argument(
|
||||
'--uuid',
|
||||
dest='uuid',
|
||||
metavar='<uuid>',
|
||||
help=_("UUID of the volume connector."))
|
||||
parser.add_argument(
|
||||
'--extra',
|
||||
dest='extra',
|
||||
metavar="<key=value>",
|
||||
action='append',
|
||||
help=_("Record arbitrary key/value metadata. "
|
||||
"Can be specified multiple times."))
|
||||
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)" % parsed_args)
|
||||
baremetal_client = self.app.client_manager.baremetal
|
||||
|
||||
field_list = ['extra', 'type', 'connector_id', 'node_uuid', 'uuid']
|
||||
fields = dict((k, v) for (k, v) in vars(parsed_args).items()
|
||||
if k in field_list and v is not None)
|
||||
fields = utils.args_array_to_dict(fields, 'extra')
|
||||
volume_connector = baremetal_client.volume_connector.create(**fields)
|
||||
|
||||
data = dict([(f, getattr(volume_connector, f, '')) for f in
|
||||
res_fields.VOLUME_CONNECTOR_DETAILED_RESOURCE.fields])
|
||||
return self.dict2columns(data)
|
||||
|
||||
|
||||
class ShowBaremetalVolumeConnector(command.ShowOne):
|
||||
"""Show baremetal volume connector details."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".ShowBaremetalVolumeConnector")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = (
|
||||
super(ShowBaremetalVolumeConnector, self).get_parser(prog_name))
|
||||
|
||||
parser.add_argument(
|
||||
'volume_connector',
|
||||
metavar='<id>',
|
||||
help=_("UUID of the volume connector."))
|
||||
parser.add_argument(
|
||||
'--fields',
|
||||
nargs='+',
|
||||
dest='fields',
|
||||
metavar='<field>',
|
||||
action='append',
|
||||
choices=res_fields.VOLUME_CONNECTOR_DETAILED_RESOURCE.fields,
|
||||
default=[],
|
||||
help=_("One or more volume connector fields. Only these fields "
|
||||
"will be fetched from the server."))
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
baremetal_client = self.app.client_manager.baremetal
|
||||
fields = list(itertools.chain.from_iterable(parsed_args.fields))
|
||||
fields = fields if fields else None
|
||||
|
||||
volume_connector = baremetal_client.volume_connector.get(
|
||||
parsed_args.volume_connector, fields=fields)._info
|
||||
|
||||
volume_connector.pop("links", None)
|
||||
return zip(*sorted(volume_connector.items()))
|
||||
|
||||
|
||||
class ListBaremetalVolumeConnector(command.Lister):
|
||||
"""List baremetal volume connectors."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".ListBaremetalVolumeConnector")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = (
|
||||
super(ListBaremetalVolumeConnector, self).get_parser(prog_name))
|
||||
|
||||
parser.add_argument(
|
||||
'--node',
|
||||
dest='node',
|
||||
metavar='<node>',
|
||||
help=_("Only list volume connectors of this node (name or UUID)."))
|
||||
parser.add_argument(
|
||||
'--limit',
|
||||
dest='limit',
|
||||
metavar='<limit>',
|
||||
type=int,
|
||||
help=_('Maximum number of volume connectors to return per '
|
||||
'request, 0 for no limit. Default is the maximum number '
|
||||
'used by the Baremetal API Service.'))
|
||||
parser.add_argument(
|
||||
'--marker',
|
||||
dest='marker',
|
||||
metavar='<volume connector>',
|
||||
help=_('Volume connector UUID (for example, of the last volume '
|
||||
'connector in the list from a previous request). Returns '
|
||||
'the list of volume connectors after this UUID.'))
|
||||
parser.add_argument(
|
||||
'--sort',
|
||||
dest='sort',
|
||||
metavar='<key>[:<direction>]',
|
||||
help=_('Sort output by specified volume connector fields and '
|
||||
'directions (asc or desc) (default:asc). Multiple fields '
|
||||
'and directions can be specified, separated by comma.'))
|
||||
|
||||
display_group = parser.add_mutually_exclusive_group(required=False)
|
||||
display_group.add_argument(
|
||||
'--long',
|
||||
dest='detail',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help=_("Show detailed information about volume connectors."))
|
||||
display_group.add_argument(
|
||||
'--fields',
|
||||
nargs='+',
|
||||
dest='fields',
|
||||
metavar='<field>',
|
||||
action='append',
|
||||
default=[],
|
||||
choices=res_fields.VOLUME_CONNECTOR_DETAILED_RESOURCE.fields,
|
||||
help=_("One or more volume connector fields. Only these fields "
|
||||
"will be fetched from the server. Can not be used when "
|
||||
"'--long' is specified."))
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)" % parsed_args)
|
||||
client = self.app.client_manager.baremetal
|
||||
|
||||
columns = res_fields.VOLUME_CONNECTOR_RESOURCE.fields
|
||||
labels = res_fields.VOLUME_CONNECTOR_RESOURCE.labels
|
||||
|
||||
params = {}
|
||||
if parsed_args.limit is not None and parsed_args.limit < 0:
|
||||
raise exc.CommandError(
|
||||
_('Expected non-negative --limit, got %s') %
|
||||
parsed_args.limit)
|
||||
params['limit'] = parsed_args.limit
|
||||
params['marker'] = parsed_args.marker
|
||||
if parsed_args.node is not None:
|
||||
params['node'] = parsed_args.node
|
||||
|
||||
if parsed_args.detail:
|
||||
params['detail'] = parsed_args.detail
|
||||
columns = res_fields.VOLUME_CONNECTOR_DETAILED_RESOURCE.fields
|
||||
labels = res_fields.VOLUME_CONNECTOR_DETAILED_RESOURCE.labels
|
||||
elif parsed_args.fields:
|
||||
params['detail'] = False
|
||||
fields = itertools.chain.from_iterable(parsed_args.fields)
|
||||
resource = res_fields.Resource(list(fields))
|
||||
columns = resource.fields
|
||||
labels = resource.labels
|
||||
params['fields'] = columns
|
||||
|
||||
self.log.debug("params(%s)" % params)
|
||||
data = client.volume_connector.list(**params)
|
||||
|
||||
data = oscutils.sort_items(data, parsed_args.sort)
|
||||
|
||||
return (labels,
|
||||
(oscutils.get_item_properties(s, columns, formatters={
|
||||
'Properties': oscutils.format_dict},) for s in data))
|
||||
|
||||
|
||||
class DeleteBaremetalVolumeConnector(command.Command):
|
||||
"""Unregister baremetal volume connector(s)."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".DeleteBaremetalVolumeConnector")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = (
|
||||
super(DeleteBaremetalVolumeConnector, self).get_parser(prog_name))
|
||||
parser.add_argument(
|
||||
'volume_connectors',
|
||||
metavar='<volume connector>',
|
||||
nargs='+',
|
||||
help=_("UUID(s) of the volume connector(s) to delete."))
|
||||
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
baremetal_client = self.app.client_manager.baremetal
|
||||
|
||||
failures = []
|
||||
for volume_connector in parsed_args.volume_connectors:
|
||||
try:
|
||||
baremetal_client.volume_connector.delete(volume_connector)
|
||||
print(_('Deleted volume connector %s') % volume_connector)
|
||||
except exc.ClientException as e:
|
||||
failures.append(_("Failed to delete volume connector "
|
||||
"%(volume_connector)s: %(error)s")
|
||||
% {'volume_connector': volume_connector,
|
||||
'error': e})
|
||||
|
||||
if failures:
|
||||
raise exc.ClientException("\n".join(failures))
|
||||
|
||||
|
||||
class SetBaremetalVolumeConnector(command.Command):
|
||||
"""Set baremetal volume connector properties."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".SetBaremetalVolumeConnector")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = (
|
||||
super(SetBaremetalVolumeConnector, self).get_parser(prog_name))
|
||||
|
||||
parser.add_argument(
|
||||
'volume_connector',
|
||||
metavar='<volume connector>',
|
||||
help=_("UUID of the volume connector."))
|
||||
parser.add_argument(
|
||||
'--node',
|
||||
dest='node_uuid',
|
||||
metavar='<uuid>',
|
||||
help=_('UUID of the node that this volume connector belongs to.'))
|
||||
parser.add_argument(
|
||||
'--type',
|
||||
dest='type',
|
||||
metavar="<type>",
|
||||
choices=('iqn', 'ip', 'mac', 'wwnn', 'wwpn'),
|
||||
help=_("Type of the volume connector. Can be 'iqn', 'ip', 'mac', "
|
||||
"'wwnn', 'wwpn'."))
|
||||
parser.add_argument(
|
||||
'--connector-id',
|
||||
dest='connector_id',
|
||||
metavar="<connector id>",
|
||||
help=_("ID of the volume connector in the specified type."))
|
||||
parser.add_argument(
|
||||
'--extra',
|
||||
dest='extra',
|
||||
metavar="<key=value>",
|
||||
action='append',
|
||||
help=_("Record arbitrary key/value metadata. "
|
||||
"Can be specified multiple times."))
|
||||
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
baremetal_client = self.app.client_manager.baremetal
|
||||
|
||||
properties = []
|
||||
if parsed_args.node_uuid:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', ["node_uuid=%s" % parsed_args.node_uuid]))
|
||||
if parsed_args.type:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', ["type=%s" % parsed_args.type]))
|
||||
if parsed_args.connector_id:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', ["connector_id=%s" % parsed_args.connector_id]))
|
||||
|
||||
if parsed_args.extra:
|
||||
properties.extend(utils.args_array_to_patch(
|
||||
'add', ["extra/" + x for x in parsed_args.extra]))
|
||||
|
||||
if properties:
|
||||
baremetal_client.volume_connector.update(
|
||||
parsed_args.volume_connector, properties)
|
||||
else:
|
||||
self.log.warning("Please specify what to set.")
|
||||
|
||||
|
||||
class UnsetBaremetalVolumeConnector(command.Command):
|
||||
"""Unset baremetal volume connector properties."""
|
||||
log = logging.getLogger(__name__ + "UnsetBaremetalVolumeConnector")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = (
|
||||
super(UnsetBaremetalVolumeConnector, self).get_parser(prog_name))
|
||||
|
||||
parser.add_argument(
|
||||
'volume_connector',
|
||||
metavar='<volume connector>',
|
||||
help=_("UUID of the volume connector."))
|
||||
parser.add_argument(
|
||||
'--extra',
|
||||
dest='extra',
|
||||
metavar="<key>",
|
||||
action='append',
|
||||
help=_('Extra to unset (repeat option to unset multiple extras)'))
|
||||
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)", parsed_args)
|
||||
|
||||
baremetal_client = self.app.client_manager.baremetal
|
||||
|
||||
properties = []
|
||||
if parsed_args.extra:
|
||||
properties.extend(utils.args_array_to_patch('remove',
|
||||
['extra/' + x for x in parsed_args.extra]))
|
||||
|
||||
if properties:
|
||||
baremetal_client.volume_connector.update(
|
||||
parsed_args.volume_connector, properties)
|
||||
else:
|
||||
self.log.warning("Please specify what to unset.")
|
@ -1,412 +0,0 @@
|
||||
# Copyright 2017 FUJITSU LIMITED
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import itertools
|
||||
import logging
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import utils as oscutils
|
||||
|
||||
from ironicclient.common.i18n import _
|
||||
from ironicclient.common import utils
|
||||
from ironicclient import exc
|
||||
from ironicclient.v1 import resource_fields as res_fields
|
||||
|
||||
|
||||
class CreateBaremetalVolumeTarget(command.ShowOne):
|
||||
"""Create a new baremetal volume target."""
|
||||
|
||||
log = logging.getLogger(__name__ + ".CreateBaremetalVolumeTarget")
|
||||
|
||||
def get_parser(self, prog_name):
|
||||
parser = super(CreateBaremetalVolumeTarget, self).get_parser(prog_name)
|
||||
|
||||
parser.add_argument(
|
||||
'--node',
|
||||
dest='node_uuid',
|
||||
metavar='<uuid>',
|
||||
required=True,
|
||||
help=_('UUID of the node that this volume target belongs to.'))
|
||||
parser.add_argument(
|
||||
'--type',
|
||||
dest='volume_type',
|
||||
metavar="<volume type>",
|
||||
required=True,
|
||||
help=_("Type of the volume target, e.g. 'iscsi', 'fibre_channel', "
|
||||
"'rbd'."))
|
||||
parser.add_argument(
|
||||
'--property',
|
||||
dest='properties',
|
||||
metavar="<key=value>",
|
||||
action='append',
|
||||
help=_("Key/value property related to the type of this volume "
|
||||
"target. Can be specified multiple times."
|
||||
))
|
||||
parser.add_argument(
|
||||
'--boot-index',
|
||||
dest='boot_index',
|
||||
metavar="<boot index>",
|
||||
type=int,
|
||||
required=True,
|
||||
help=_("Boot index of the volume target."))
|
||||
parser.add_argument(
|
||||
'--volume-id',
|
||||
dest='volume_id',
|
||||
metavar="<volume id>",
|
||||
required=True,
|
||||
help=_("ID of the volume associated with this target."))
|
||||
parser.add_argument(
|
||||
'--uuid',
|
||||
dest='uuid',
|
||||
metavar='<uuid>',
|
||||
help=_("UUID of the volume target."))
|
||||
parser.add_argument(
|
||||
'--extra',
|
||||
dest='extra',
|
||||
metavar="<key=value>",
|
||||
action='append',
|
||||
help=_("Record arbitrary key/value metadata. "
|
||||
"Can be specified multiple times."))
|
||||
|
||||
return parser
|
||||
|
||||
def take_action(self, parsed_args):
|
||||
self.log.debug("take_action(%s)" % parsed_args)
|
||||
baremetal_client = self.app.client_manager.baremetal
|
||||
|
||||
if parsed_args.boot_index < 0:
|
||||
raise exc.CommandError(
|
||||
_('Expected non-negative --boot-index, got %s') %
|
||||
parsed_args.boot_index)
|
||||
|
||||
field_list = ['extra', 'volume_type', 'properties',
|
||||
'boot_index', 'node_uuid', 'volume_id', 'uuid']
|
||||
fields = dict((k, v) for (k, v) in vars(parsed_args).items()
|
||||
if k in field_list and v is not None)
|
||||
fields = utils.args_array_to_dict(fields, 'properties')
|
||||
fields = utils.args_array_to_dict(fields, 'extra')
|
||||
volume_target = baremetal_client.volume_target.create(**fields)
|
||||
|
||||
data = dict([(f, getattr(volume_target, f, '')) for f in
|
||||
res_fields.VOLUME_TARGET_DETAILED_RESOURCE.fields])
|
||||
return self.dict2columns(data)
|
||||
|
||||
|
||||
class ShowBaremetalVolumeTarget(command.ShowOne):
    """Show baremetal volume target details."""

    log = logging.getLogger(__name__ + ".ShowBaremetalVolumeTarget")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(ShowBaremetalVolumeTarget, self).get_parser(prog_name)
        parser.add_argument(
            'volume_target',
            metavar='<id>',
            help=_("UUID of the volume target."))
        parser.add_argument(
            '--fields',
            nargs='+',
            dest='fields',
            metavar='<field>',
            action='append',
            default=[],
            choices=res_fields.VOLUME_TARGET_DETAILED_RESOURCE.fields,
            help=_("One or more volume target fields. Only these fields will "
                   "be fetched from the server."))
        return parser

    def take_action(self, parsed_args):
        """Fetch one volume target and return its fields, sorted by name."""
        self.log.debug("take_action(%s)", parsed_args)

        client = self.app.client_manager.baremetal
        # --fields is nargs='+' with action='append', so it arrives as a
        # list of lists; flatten it (None means "all fields").
        requested = [f for group in parsed_args.fields for f in group]
        requested = requested or None

        target = client.volume_target.get(
            parsed_args.volume_target, fields=requested)._info
        # "links" is API plumbing, not useful display output.
        target.pop("links", None)
        return zip(*sorted(target.items()))
|
||||
|
||||
|
||||
class ListBaremetalVolumeTarget(command.Lister):
    """List baremetal volume targets."""

    log = logging.getLogger(__name__ + ".ListBaremetalVolumeTarget")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = super(ListBaremetalVolumeTarget, self).get_parser(prog_name)

        parser.add_argument(
            '--node',
            dest='node',
            metavar='<node>',
            help=_("Only list volume targets of this node (name or UUID)."))
        parser.add_argument(
            '--limit',
            dest='limit',
            metavar='<limit>',
            type=int,
            help=_('Maximum number of volume targets to return per request, '
                   '0 for no limit. Default is the maximum number used '
                   'by the Baremetal API Service.'))
        parser.add_argument(
            '--marker',
            dest='marker',
            metavar='<volume target>',
            help=_('Volume target UUID (for example, of the last '
                   'volume target in the list from a previous request). '
                   'Returns the list of volume targets after this UUID.'))
        parser.add_argument(
            '--sort',
            dest='sort',
            metavar='<key>[:<direction>]',
            help=_('Sort output by specified volume target fields and '
                   'directions (asc or desc) (default:asc). Multiple fields '
                   'and directions can be specified, separated by comma.'))

        # --long and --fields are mutually exclusive display modes.
        display_group = parser.add_mutually_exclusive_group(required=False)
        display_group.add_argument(
            '--long',
            dest='detail',
            action='store_true',
            default=False,
            help=_("Show detailed information about volume targets."))
        display_group.add_argument(
            '--fields',
            nargs='+',
            dest='fields',
            metavar='<field>',
            action='append',
            default=[],
            choices=res_fields.VOLUME_TARGET_DETAILED_RESOURCE.fields,
            help=_("One or more volume target fields. Only these fields will "
                   "be fetched from the server. Can not be used when "
                   "'--long' is specified."))
        return parser

    def take_action(self, parsed_args):
        """List volume targets, honoring paging, filtering and sorting.

        :returns: tuple of (column labels, generator of rows) as
            expected by cliff's Lister.
        :raises: exc.CommandError if --limit is negative.
        """
        # FIX: use lazy %-style logging arguments instead of eager "%"
        # formatting, consistent with the sibling commands here.
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.baremetal

        columns = res_fields.VOLUME_TARGET_RESOURCE.fields
        labels = res_fields.VOLUME_TARGET_RESOURCE.labels

        params = {}
        if parsed_args.limit is not None and parsed_args.limit < 0:
            raise exc.CommandError(
                _('Expected non-negative --limit, got %s') %
                parsed_args.limit)
        params['limit'] = parsed_args.limit
        params['marker'] = parsed_args.marker
        if parsed_args.node is not None:
            params['node'] = parsed_args.node

        if parsed_args.detail:
            params['detail'] = parsed_args.detail
            columns = res_fields.VOLUME_TARGET_DETAILED_RESOURCE.fields
            labels = res_fields.VOLUME_TARGET_DETAILED_RESOURCE.labels
        elif parsed_args.fields:
            params['detail'] = False
            # --fields arrives as a list of lists (nargs='+' + append).
            fields = itertools.chain.from_iterable(parsed_args.fields)
            resource = res_fields.Resource(list(fields))
            columns = resource.fields
            labels = resource.labels
            params['fields'] = columns

        self.log.debug("params(%s)", params)
        data = client.volume_target.list(**params)

        # Client-side sort; the API request itself is unsorted.
        data = oscutils.sort_items(data, parsed_args.sort)

        return (labels,
                (oscutils.get_item_properties(s, columns, formatters={
                    'Properties': oscutils.format_dict},) for s in data))
|
||||
|
||||
|
||||
class DeleteBaremetalVolumeTarget(command.Command):
    """Unregister baremetal volume target(s)."""

    log = logging.getLogger(__name__ + ".DeleteBaremetalVolumeTarget")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = (
            super(DeleteBaremetalVolumeTarget, self).get_parser(prog_name))
        parser.add_argument(
            'volume_targets',
            metavar='<volume target>',
            nargs='+',
            help=_("UUID(s) of the volume target(s) to delete."))

        return parser

    def take_action(self, parsed_args):
        """Delete each requested target; report all failures at the end."""
        self.log.debug("take_action(%s)", parsed_args)

        client = self.app.client_manager.baremetal

        # Collect errors rather than aborting on the first one so a
        # multi-target invocation deletes as much as possible.
        errors = []
        for target_uuid in parsed_args.volume_targets:
            try:
                client.volume_target.delete(target_uuid)
            except exc.ClientException as e:
                errors.append(_("Failed to delete volume target "
                                "%(volume_target)s: %(error)s")
                              % {'volume_target': target_uuid, 'error': e})
            else:
                print(_('Deleted volume target %s') % target_uuid)

        if errors:
            raise exc.ClientException("\n".join(errors))
|
||||
|
||||
|
||||
class SetBaremetalVolumeTarget(command.Command):
    """Set baremetal volume target properties."""

    log = logging.getLogger(__name__ + ".SetBaremetalVolumeTarget")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = (
            super(SetBaremetalVolumeTarget, self).get_parser(prog_name))

        parser.add_argument(
            'volume_target',
            metavar='<volume target>',
            help=_("UUID of the volume target."))
        parser.add_argument(
            '--node',
            dest='node_uuid',
            metavar='<uuid>',
            help=_('UUID of the node that this volume target belongs to.'))
        parser.add_argument(
            '--type',
            dest='volume_type',
            metavar="<volume type>",
            help=_("Type of the volume target, e.g. 'iscsi', 'fibre_channel', "
                   "'rbd'."))
        parser.add_argument(
            '--property',
            dest='properties',
            metavar="<key=value>",
            action='append',
            help=_("Key/value property related to the type of this volume "
                   "target. Can be specified multiple times."))
        parser.add_argument(
            '--boot-index',
            dest='boot_index',
            metavar="<boot index>",
            type=int,
            help=_("Boot index of the volume target."))
        parser.add_argument(
            '--volume-id',
            dest='volume_id',
            metavar="<volume id>",
            help=_("ID of the volume associated with this target."))
        parser.add_argument(
            '--extra',
            dest='extra',
            metavar="<key=value>",
            action='append',
            help=_("Record arbitrary key/value metadata. "
                   "Can be specified multiple times."))

        return parser

    def take_action(self, parsed_args):
        """Apply the requested updates as a JSON "add" patch.

        :raises: exc.CommandError if --boot-index is negative.
        """
        self.log.debug("take_action(%s)", parsed_args)

        baremetal_client = self.app.client_manager.baremetal

        if parsed_args.boot_index is not None and parsed_args.boot_index < 0:
            raise exc.CommandError(
                _('Expected non-negative --boot-index, got %s') %
                parsed_args.boot_index)

        properties = []
        if parsed_args.node_uuid:
            properties.extend(utils.args_array_to_patch(
                'add', ["node_uuid=%s" % parsed_args.node_uuid]))
        if parsed_args.volume_type:
            properties.extend(utils.args_array_to_patch(
                'add', ["volume_type=%s" % parsed_args.volume_type]))
        # BUG FIX: compare against None instead of truthiness. A boot
        # index of 0 is valid (it passes the non-negative check above)
        # but was previously dropped silently because int 0 is falsy.
        if parsed_args.boot_index is not None:
            properties.extend(utils.args_array_to_patch(
                'add', ["boot_index=%s" % parsed_args.boot_index]))
        if parsed_args.volume_id:
            properties.extend(utils.args_array_to_patch(
                'add', ["volume_id=%s" % parsed_args.volume_id]))

        if parsed_args.properties:
            properties.extend(utils.args_array_to_patch(
                'add', ["properties/" + x for x in parsed_args.properties]))
        if parsed_args.extra:
            properties.extend(utils.args_array_to_patch(
                'add', ["extra/" + x for x in parsed_args.extra]))

        if properties:
            baremetal_client.volume_target.update(
                parsed_args.volume_target, properties)
        else:
            self.log.warning("Please specify what to set.")
|
||||
|
||||
|
||||
class UnsetBaremetalVolumeTarget(command.Command):
    """Unset baremetal volume target properties."""

    # BUG FIX: the logger name was missing the "." separator
    # (__name__ + "UnsetBaremetalVolumeTarget"), producing a malformed
    # logger hierarchy; every sibling command uses __name__ + ".<Class>".
    log = logging.getLogger(__name__ + ".UnsetBaremetalVolumeTarget")

    def get_parser(self, prog_name):
        """Build the argument parser for this command."""
        parser = (
            super(UnsetBaremetalVolumeTarget, self).get_parser(prog_name))

        parser.add_argument(
            'volume_target',
            metavar='<volume target>',
            help=_("UUID of the volume target."))
        parser.add_argument(
            '--extra',
            dest='extra',
            metavar="<key>",
            action='append',
            help=_('Extra to unset (repeat option to unset multiple extras)'))
        parser.add_argument(
            "--property",
            dest='properties',
            metavar="<key>",
            action='append',
            help='Property to unset on this baremetal volume target '
                 '(repeat option to unset multiple properties).',
        )

        return parser

    def take_action(self, parsed_args):
        """Remove the requested keys via a JSON "remove" patch."""
        self.log.debug("take_action(%s)", parsed_args)

        baremetal_client = self.app.client_manager.baremetal

        properties = []
        if parsed_args.extra:
            properties.extend(utils.args_array_to_patch(
                'remove', ['extra/' + x for x in parsed_args.extra]))
        if parsed_args.properties:
            properties.extend(utils.args_array_to_patch(
                'remove', ['properties/' + x for x in parsed_args.properties]))

        if properties:
            baremetal_client.volume_target.update(
                parsed_args.volume_target, properties)
        else:
            self.log.warning("Please specify what to unset.")
|
@ -1,460 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Command-line interface to the OpenStack Bare Metal Provisioning API.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import getpass
|
||||
import logging
|
||||
import os
|
||||
import pkgutil
|
||||
import re
|
||||
import sys
|
||||
|
||||
from keystoneauth1.loading import session as kasession
|
||||
from oslo_utils import encodeutils
|
||||
from oslo_utils import importutils
|
||||
import six
|
||||
|
||||
import ironicclient
|
||||
from ironicclient.common.apiclient import exceptions
|
||||
from ironicclient.common import cliutils
|
||||
from ironicclient.common import http
|
||||
from ironicclient.common.i18n import _
|
||||
from ironicclient.common import utils
|
||||
from ironicclient import exc
|
||||
|
||||
|
||||
# (major version, microversion) pair substituted when the user
# explicitly requests the 'latest' API version.
LATEST_API_VERSION = ('1', 'latest')
# Printed to stderr when no API version was supplied on the command
# line or via the environment (see IronicShell._check_version()).
MISSING_VERSION_WARNING = (
    "You are using the default API version of the 'ironic' command. "
    "This is currently API version %s. In the future, the default will be "
    "the latest API version understood by both API and CLI. You can preserve "
    "the current behavior by passing the --ironic-api-version argument with "
    "the desired version or using the IRONIC_API_VERSION environment variable."
)
|
||||
|
||||
|
||||
class IronicShell(object):
|
||||
|
||||
def get_base_parser(self):
|
||||
parser = argparse.ArgumentParser(
|
||||
prog='ironic',
|
||||
description=__doc__.strip(),
|
||||
epilog=_('See "ironic help COMMAND" '
|
||||
'for help on a specific command.'),
|
||||
add_help=False,
|
||||
formatter_class=HelpFormatter,
|
||||
)
|
||||
|
||||
# Register global Keystone args first so their defaults are respected.
|
||||
# See https://bugs.launchpad.net/python-ironicclient/+bug/1463581
|
||||
kasession.register_argparse_arguments(parser)
|
||||
|
||||
# Global arguments
|
||||
parser.add_argument('-h', '--help',
|
||||
action='store_true',
|
||||
help=argparse.SUPPRESS,
|
||||
)
|
||||
|
||||
parser.add_argument('--version',
|
||||
action='version',
|
||||
version=ironicclient.__version__)
|
||||
|
||||
parser.add_argument('--debug',
|
||||
default=bool(cliutils.env('IRONICCLIENT_DEBUG')),
|
||||
action='store_true',
|
||||
help=_('Defaults to env[IRONICCLIENT_DEBUG]'))
|
||||
|
||||
parser.add_argument('--json',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help=_('Print JSON response without formatting.'))
|
||||
|
||||
parser.add_argument('-v', '--verbose',
|
||||
default=False, action="store_true",
|
||||
help=_('Print more verbose output'))
|
||||
|
||||
# for backward compatibility only
|
||||
parser.add_argument('--cert-file',
|
||||
dest='os_cert',
|
||||
help=_('DEPRECATED! Use --os-cert.'))
|
||||
|
||||
# for backward compatibility only
|
||||
parser.add_argument('--key-file',
|
||||
dest='os_key',
|
||||
help=_('DEPRECATED! Use --os-key.'))
|
||||
|
||||
# for backward compatibility only
|
||||
parser.add_argument('--ca-file',
|
||||
dest='os_cacert',
|
||||
help=_('DEPRECATED! Use --os-cacert.'))
|
||||
|
||||
parser.add_argument('--os-username',
|
||||
default=cliutils.env('OS_USERNAME'),
|
||||
help=_('Defaults to env[OS_USERNAME]'))
|
||||
|
||||
parser.add_argument('--os_username',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--os-password',
|
||||
default=cliutils.env('OS_PASSWORD'),
|
||||
help=_('Defaults to env[OS_PASSWORD]'))
|
||||
|
||||
parser.add_argument('--os_password',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--os-tenant-id',
|
||||
default=cliutils.env('OS_TENANT_ID'),
|
||||
help=_('Defaults to env[OS_TENANT_ID]'))
|
||||
|
||||
parser.add_argument('--os_tenant_id',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--os-tenant-name',
|
||||
default=cliutils.env('OS_TENANT_NAME'),
|
||||
help=_('Defaults to env[OS_TENANT_NAME]'))
|
||||
|
||||
parser.add_argument('--os_tenant_name',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--os-auth-url',
|
||||
default=cliutils.env('OS_AUTH_URL'),
|
||||
help=_('Defaults to env[OS_AUTH_URL]'))
|
||||
|
||||
parser.add_argument('--os_auth_url',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--os-region-name',
|
||||
default=cliutils.env('OS_REGION_NAME'),
|
||||
help=_('Defaults to env[OS_REGION_NAME]'))
|
||||
|
||||
parser.add_argument('--os_region_name',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--os-auth-token',
|
||||
default=cliutils.env('OS_AUTH_TOKEN'),
|
||||
help=_('Defaults to env[OS_AUTH_TOKEN]'))
|
||||
|
||||
parser.add_argument('--os_auth_token',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--ironic-url',
|
||||
default=cliutils.env('IRONIC_URL'),
|
||||
help=_('Defaults to env[IRONIC_URL]'))
|
||||
|
||||
parser.add_argument('--ironic_url',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--ironic-api-version',
|
||||
default=cliutils.env('IRONIC_API_VERSION',
|
||||
default=None),
|
||||
help=_('Accepts 1.x (where "x" is microversion) '
|
||||
'or "latest". Defaults to '
|
||||
'env[IRONIC_API_VERSION] or %s. Starting '
|
||||
'with the Queens release this will '
|
||||
'default to "latest".') % http.DEFAULT_VER)
|
||||
|
||||
parser.add_argument('--ironic_api_version',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--os-service-type',
|
||||
default=cliutils.env('OS_SERVICE_TYPE'),
|
||||
help=_('Defaults to env[OS_SERVICE_TYPE] or '
|
||||
'"baremetal"'))
|
||||
|
||||
parser.add_argument('--os_service_type',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--os-endpoint',
|
||||
dest='ironic_url',
|
||||
default=cliutils.env('OS_SERVICE_ENDPOINT'),
|
||||
help=_('Specify an endpoint to use instead of '
|
||||
'retrieving one from the service catalog '
|
||||
'(via authentication). '
|
||||
'Defaults to env[OS_SERVICE_ENDPOINT].'))
|
||||
|
||||
parser.add_argument('--os_endpoint',
|
||||
dest='ironic_url',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--os-endpoint-type',
|
||||
default=cliutils.env('OS_ENDPOINT_TYPE'),
|
||||
help=_('Defaults to env[OS_ENDPOINT_TYPE] or '
|
||||
'"publicURL"'))
|
||||
|
||||
parser.add_argument('--os_endpoint_type',
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
parser.add_argument('--os-user-domain-id',
|
||||
default=cliutils.env('OS_USER_DOMAIN_ID'),
|
||||
help=_('Defaults to env[OS_USER_DOMAIN_ID].'))
|
||||
|
||||
parser.add_argument('--os-user-domain-name',
|
||||
default=cliutils.env('OS_USER_DOMAIN_NAME'),
|
||||
help=_('Defaults to env[OS_USER_DOMAIN_NAME].'))
|
||||
|
||||
parser.add_argument('--os-project-id',
|
||||
default=cliutils.env('OS_PROJECT_ID'),
|
||||
help=_('Another way to specify tenant ID. '
|
||||
'This option is mutually exclusive with '
|
||||
' --os-tenant-id. '
|
||||
'Defaults to env[OS_PROJECT_ID].'))
|
||||
|
||||
parser.add_argument('--os-project-name',
|
||||
default=cliutils.env('OS_PROJECT_NAME'),
|
||||
help=_('Another way to specify tenant name. '
|
||||
'This option is mutually exclusive with '
|
||||
' --os-tenant-name. '
|
||||
'Defaults to env[OS_PROJECT_NAME].'))
|
||||
|
||||
parser.add_argument('--os-project-domain-id',
|
||||
default=cliutils.env('OS_PROJECT_DOMAIN_ID'),
|
||||
help=_('Defaults to env[OS_PROJECT_DOMAIN_ID].'))
|
||||
|
||||
parser.add_argument('--os-project-domain-name',
|
||||
default=cliutils.env('OS_PROJECT_DOMAIN_NAME'),
|
||||
help=_('Defaults to env[OS_PROJECT_DOMAIN_NAME].'))
|
||||
|
||||
msg = _('Maximum number of retries in case of conflict error '
|
||||
'(HTTP 409). Defaults to env[IRONIC_MAX_RETRIES] or %d. '
|
||||
'Use 0 to disable retrying.') % http.DEFAULT_MAX_RETRIES
|
||||
parser.add_argument('--max-retries', type=int, help=msg,
|
||||
default=cliutils.env(
|
||||
'IRONIC_MAX_RETRIES',
|
||||
default=str(http.DEFAULT_MAX_RETRIES)))
|
||||
|
||||
msg = _('Amount of time (in seconds) between retries '
|
||||
'in case of conflict error (HTTP 409). '
|
||||
'Defaults to env[IRONIC_RETRY_INTERVAL] '
|
||||
'or %d.') % http.DEFAULT_RETRY_INTERVAL
|
||||
parser.add_argument('--retry-interval', type=int, help=msg,
|
||||
default=cliutils.env(
|
||||
'IRONIC_RETRY_INTERVAL',
|
||||
default=str(http.DEFAULT_RETRY_INTERVAL)))
|
||||
|
||||
return parser
|
||||
|
||||
def get_available_major_versions(self):
|
||||
matcher = re.compile(r"^v[0-9]+$")
|
||||
submodules = pkgutil.iter_modules([os.path.dirname(__file__)])
|
||||
available_versions = [name[1:] for loader, name, ispkg in submodules
|
||||
if matcher.search(name)]
|
||||
|
||||
return available_versions
|
||||
|
||||
def get_subcommand_parser(self, version):
|
||||
parser = self.get_base_parser()
|
||||
|
||||
self.subcommands = {}
|
||||
subparsers = parser.add_subparsers(metavar='<subcommand>',
|
||||
dest='subparser_name')
|
||||
try:
|
||||
submodule = importutils.import_versioned_module('ironicclient',
|
||||
version, 'shell')
|
||||
except ImportError as e:
|
||||
msg = _("Invalid client version '%(version)s'. "
|
||||
"Major part must be one of: '%(major)s'") % {
|
||||
"version": version,
|
||||
"major": ", ".join(self.get_available_major_versions())}
|
||||
raise exceptions.UnsupportedVersion(
|
||||
_('%(message)s, error was: %(error)s') %
|
||||
{'message': msg, 'error': e})
|
||||
submodule.enhance_parser(parser, subparsers, self.subcommands)
|
||||
utils.define_commands_from_module(subparsers, self, self.subcommands)
|
||||
return parser
|
||||
|
||||
def _setup_debugging(self, debug):
|
||||
if debug:
|
||||
logging.basicConfig(
|
||||
format="%(levelname)s (%(module)s:%(lineno)d) %(message)s",
|
||||
level=logging.DEBUG)
|
||||
else:
|
||||
logging.basicConfig(
|
||||
format="%(levelname)s %(message)s",
|
||||
level=logging.CRITICAL)
|
||||
|
||||
def do_bash_completion(self):
|
||||
"""Prints all of the commands and options for bash-completion."""
|
||||
commands = set()
|
||||
options = set()
|
||||
for sc_str, sc in self.subcommands.items():
|
||||
commands.add(sc_str)
|
||||
for option in sc._optionals._option_string_actions.keys():
|
||||
options.add(option)
|
||||
|
||||
commands.remove('bash-completion')
|
||||
print(' '.join(commands | options))
|
||||
|
||||
def _check_version(self, api_version):
|
||||
if api_version == 'latest':
|
||||
return LATEST_API_VERSION
|
||||
else:
|
||||
if api_version is None:
|
||||
print(MISSING_VERSION_WARNING % http.DEFAULT_VER,
|
||||
file=sys.stderr)
|
||||
api_version = '1'
|
||||
|
||||
try:
|
||||
versions = tuple(int(i) for i in api_version.split('.'))
|
||||
except ValueError:
|
||||
versions = ()
|
||||
if len(versions) == 1:
|
||||
# Default value of ironic_api_version is '1'.
|
||||
# If user not specify the value of api version, not passing
|
||||
# headers at all.
|
||||
os_ironic_api_version = None
|
||||
elif len(versions) == 2:
|
||||
os_ironic_api_version = api_version
|
||||
# In the case of '1.0'
|
||||
if versions[1] == 0:
|
||||
os_ironic_api_version = None
|
||||
else:
|
||||
msg = _("The requested API version %(ver)s is an unexpected "
|
||||
"format. Acceptable formats are 'X', 'X.Y', or the "
|
||||
"literal string '%(latest)s'."
|
||||
) % {'ver': api_version, 'latest': 'latest'}
|
||||
raise exc.CommandError(msg)
|
||||
|
||||
api_major_version = versions[0]
|
||||
return (api_major_version, os_ironic_api_version)
|
||||
|
||||
def main(self, argv):
|
||||
# Parse args once to find version
|
||||
parser = self.get_base_parser()
|
||||
(options, args) = parser.parse_known_args(argv)
|
||||
self._setup_debugging(options.debug)
|
||||
|
||||
# build available subcommands based on version
|
||||
(api_major_version, os_ironic_api_version) = (
|
||||
self._check_version(options.ironic_api_version))
|
||||
|
||||
subcommand_parser = self.get_subcommand_parser(api_major_version)
|
||||
self.parser = subcommand_parser
|
||||
|
||||
# Handle top-level --help/-h before attempting to parse
|
||||
# a command off the command line
|
||||
if options.help or not argv:
|
||||
self.do_help(options)
|
||||
return 0
|
||||
|
||||
# Parse args again and call whatever callback was selected
|
||||
args = subcommand_parser.parse_args(argv)
|
||||
|
||||
# Short-circuit and deal with these commands right away.
|
||||
if args.func == self.do_help:
|
||||
self.do_help(args)
|
||||
return 0
|
||||
elif args.func == self.do_bash_completion:
|
||||
self.do_bash_completion()
|
||||
return 0
|
||||
|
||||
if not (args.os_auth_token and (args.ironic_url or args.os_auth_url)):
|
||||
if not args.os_username:
|
||||
raise exc.CommandError(_("You must provide a username via "
|
||||
"either --os-username or via "
|
||||
"env[OS_USERNAME]"))
|
||||
|
||||
if not args.os_password:
|
||||
# No password, If we've got a tty, try prompting for it
|
||||
if hasattr(sys.stdin, 'isatty') and sys.stdin.isatty():
|
||||
# Check for Ctl-D
|
||||
try:
|
||||
args.os_password = getpass.getpass(
|
||||
'OpenStack Password: ')
|
||||
except EOFError:
|
||||
pass
|
||||
# No password because we didn't have a tty or the
|
||||
# user Ctl-D when prompted.
|
||||
if not args.os_password:
|
||||
raise exc.CommandError(_("You must provide a password via "
|
||||
"either --os-password, "
|
||||
"env[OS_PASSWORD], "
|
||||
"or prompted response"))
|
||||
|
||||
if not (args.os_tenant_id or args.os_tenant_name or
|
||||
args.os_project_id or args.os_project_name):
|
||||
raise exc.CommandError(
|
||||
_("You must provide a project name or"
|
||||
" project id via --os-project-name, --os-project-id,"
|
||||
" env[OS_PROJECT_ID] or env[OS_PROJECT_NAME]. You may"
|
||||
" use os-project and os-tenant interchangeably."))
|
||||
|
||||
if not args.os_auth_url:
|
||||
raise exc.CommandError(_("You must provide an auth url via "
|
||||
"either --os-auth-url or via "
|
||||
"env[OS_AUTH_URL]"))
|
||||
|
||||
if args.max_retries < 0:
|
||||
raise exc.CommandError(_("You must provide value >= 0 for "
|
||||
"--max-retries"))
|
||||
if args.retry_interval < 1:
|
||||
raise exc.CommandError(_("You must provide value >= 1 for "
|
||||
"--retry-interval"))
|
||||
client_args = (
|
||||
'os_auth_token', 'ironic_url', 'os_username', 'os_password',
|
||||
'os_auth_url', 'os_project_id', 'os_project_name', 'os_tenant_id',
|
||||
'os_tenant_name', 'os_region_name', 'os_user_domain_id',
|
||||
'os_user_domain_name', 'os_project_domain_id',
|
||||
'os_project_domain_name', 'os_service_type', 'os_endpoint_type',
|
||||
'os_cacert', 'os_cert', 'os_key', 'max_retries', 'retry_interval',
|
||||
'timeout', 'insecure'
|
||||
)
|
||||
kwargs = {}
|
||||
for key in client_args:
|
||||
kwargs[key] = getattr(args, key)
|
||||
kwargs['os_ironic_api_version'] = os_ironic_api_version
|
||||
client = ironicclient.client.get_client(api_major_version, **kwargs)
|
||||
|
||||
try:
|
||||
args.func(client, args)
|
||||
except exc.Unauthorized:
|
||||
raise exc.CommandError(_("Invalid OpenStack Identity credentials"))
|
||||
except exc.CommandError as e:
|
||||
subcommand_parser = self.subcommands[args.subparser_name]
|
||||
subcommand_parser.error(e)
|
||||
|
||||
@cliutils.arg('command', metavar='<subcommand>', nargs='?',
|
||||
help=_('Display help for <subcommand>'))
|
||||
def do_help(self, args):
|
||||
"""Display help about this program or one of its subcommands."""
|
||||
if getattr(args, 'command', None):
|
||||
if args.command in self.subcommands:
|
||||
self.subcommands[args.command].print_help()
|
||||
else:
|
||||
raise exc.CommandError(_("'%s' is not a valid subcommand") %
|
||||
args.command)
|
||||
else:
|
||||
self.parser.print_help()
|
||||
|
||||
|
||||
class HelpFormatter(argparse.HelpFormatter):
    """Help formatter that capitalizes section headings."""

    def start_section(self, heading):
        # Render headings like "Positional arguments" rather than
        # argparse's default lowercase titles.
        capitalized = heading.capitalize()
        super(HelpFormatter, self).start_section(capitalized)
|
||||
|
||||
|
||||
def main():
    """Console entry point; returns a shell-style exit status on failure."""
    shell = IronicShell()
    try:
        shell.main(sys.argv[1:])
    except KeyboardInterrupt:
        # 130 is the conventional exit status for SIGINT (128 + 2).
        print(_("... terminating ironic client"), file=sys.stderr)
        return 130
    except Exception as e:
        message = encodeutils.safe_encode(six.text_type(e))
        print(message, file=sys.stderr)
        return 1


if __name__ == "__main__":
    sys.exit(main())
|
@ -1,445 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
|
||||
import six
|
||||
import six.moves.configparser as config_parser
|
||||
from tempest.lib.cli import base
|
||||
from tempest.lib.common.utils import data_utils
|
||||
from tempest.lib import exceptions
|
||||
|
||||
import ironicclient.tests.functional.utils as utils
|
||||
|
||||
DEFAULT_CONFIG_FILE = os.path.join(os.path.dirname(__file__), 'test.conf')
|
||||
|
||||
|
||||
class FunctionalTestBase(base.ClientTestBase):
|
||||
"""Ironic base class, calls to ironicclient."""
|
||||
|
||||
    def setUp(self):
        """Prepare a CLI client and common flags for each test."""
        super(FunctionalTestBase, self).setUp()
        self.client = self._get_clients()
        # NOTE(kromanenko) set ironic api version for portgroups
        self.pg_api_ver = '--ironic-api-version 1.25'
|
||||
|
||||
    def _get_clients(self):
        """Build a tempest CLIClient from the functional test config.

        Keystone configs (those with os_auth_url) get a credentialed
        client; otherwise a token + endpoint (noauth) client is built and
        self.ironic_url / self.os_auth_token are recorded for later use.

        :returns: a configured tempest.lib.cli.base.CLIClient.
        """
        # NOTE(aarefiev): {toxinidir} is a current working directory, so
        # the tox env path is {toxinidir}/.tox
        cli_dir = os.path.join(os.path.abspath('.'), '.tox/functional/bin')

        config = self._get_config()
        if config.get('os_auth_url'):
            client = base.CLIClient(cli_dir=cli_dir,
                                    username=config['os_username'],
                                    password=config['os_password'],
                                    tenant_name=config['os_project_name'],
                                    uri=config['os_auth_url'])
            # Record optional Keystone v3 domain ids as instance
            # attributes; _ironic() appends them as CLI flags when set.
            for keystone_object in 'user', 'project':
                domain_attr = 'os_%s_domain_id' % keystone_object
                if config.get(domain_attr):
                    setattr(self, domain_attr, config[domain_attr])
        else:
            self.ironic_url = config['ironic_url']
            self.os_auth_token = config['os_auth_token']
            client = base.CLIClient(cli_dir=cli_dir,
                                    ironic_url=self.ironic_url,
                                    os_auth_token=self.os_auth_token)
        return client
|
||||
|
||||
def _get_config(self):
|
||||
config_file = os.environ.get('IRONICCLIENT_TEST_CONFIG',
|
||||
DEFAULT_CONFIG_FILE)
|
||||
# SafeConfigParser was deprecated in Python 3.2
|
||||
if six.PY3:
|
||||
config = config_parser.ConfigParser()
|
||||
else:
|
||||
config = config_parser.SafeConfigParser()
|
||||
if not config.read(config_file):
|
||||
self.skipTest('Skipping, no test config found @ %s' % config_file)
|
||||
try:
|
||||
auth_strategy = config.get('functional', 'auth_strategy')
|
||||
except config_parser.NoOptionError:
|
||||
auth_strategy = 'keystone'
|
||||
if auth_strategy not in ['keystone', 'noauth']:
|
||||
raise self.fail(
|
||||
'Invalid auth type specified: %s in functional must be '
|
||||
'one of: [keystone, noauth]' % auth_strategy)
|
||||
|
||||
conf_settings = []
|
||||
keystone_v3_conf_settings = []
|
||||
if auth_strategy == 'keystone':
|
||||
conf_settings += ['os_auth_url', 'os_username',
|
||||
'os_password', 'os_project_name']
|
||||
keystone_v3_conf_settings += ['os_user_domain_id',
|
||||
'os_project_domain_id']
|
||||
else:
|
||||
conf_settings += ['os_auth_token', 'ironic_url']
|
||||
|
||||
cli_flags = {}
|
||||
missing = []
|
||||
for c in conf_settings + keystone_v3_conf_settings:
|
||||
try:
|
||||
cli_flags[c] = config.get('functional', c)
|
||||
except config_parser.NoOptionError:
|
||||
# NOTE(vdrok): Here we ignore the absence of KS v3 options as
|
||||
# v2 may be used. Keystone client will do the actual check of
|
||||
# the parameters' correctness.
|
||||
if c not in keystone_v3_conf_settings:
|
||||
missing.append(c)
|
||||
if missing:
|
||||
self.fail('Missing required setting in test.conf (%(conf)s) for '
|
||||
'auth_strategy=%(auth)s: %(missing)s' %
|
||||
{'conf': config_file,
|
||||
'auth': auth_strategy,
|
||||
'missing': ','.join(missing)})
|
||||
return cli_flags
|
||||
|
||||
def _cmd_no_auth(self, cmd, action, flags='', params=''):
    """Execute a CLI command with token/endpoint (noauth) credentials.

    :param cmd: command to be executed
    :type cmd: string
    :param action: command on cli to run
    :type action: string
    :param flags: optional cli flags to use
    :type flags: string
    :param params: optional positional args to use
    :type params: string
    """
    auth_values = {'token': self.os_auth_token,
                   'url': self.ironic_url,
                   'flags': flags}
    full_flags = ('--os_auth_token %(token)s --ironic_url %(url)s '
                  '%(flags)s' % auth_values)
    return base.execute(cmd, action, full_flags, params,
                        cli_dir=self.client.cli_dir)
def _ironic(self, action, flags='', params='', merge_stderr=False):
    """Execute an ironic CLI command for the given action.

    :param action: the cli command to run using Ironic
    :type action: string
    :param flags: any optional cli flags to use
    :type flags: string
    :param params: any optional positional args to use
    :type params: string
    :param merge_stderr: whether to merge stderr into the result
    :type merge_stderr: bool
    """
    flags += ' --os-endpoint-type publicURL'
    if hasattr(self, 'os_auth_token'):
        # Token auth configured: route through the noauth helper.
        return self._cmd_no_auth('ironic', action, flags, params)
    # Keystone auth: forward any configured v3 domain ids.
    for ks_obj in ('user', 'project'):
        domain_attr = 'os_%s_domain_id' % ks_obj
        if hasattr(self, domain_attr):
            flags += ' --os-%(ks_obj)s-domain-id %(value)s' % {
                'ks_obj': ks_obj,
                'value': getattr(self, domain_attr)
            }
    return self.client.cmd_with_auth('ironic', action, flags, params,
                                     merge_stderr=merge_stderr)
def _ironic_osc(self, action, flags='', params='', merge_stderr=False):
    """Execute baremetal commands via OpenStack Client."""
    config = self._get_config()
    # NOTE(review): _get_config() returns a flat dict of CLI flags, so
    # dict.get('functional', 'os_identity_api_version') returns the
    # *default* string unless a 'functional' key exists — confirm this is
    # the intended behavior against the callers/config layout.
    id_api_version = config.get('functional', 'os_identity_api_version')
    flags += ' --os-identity-api-version {0}'.format(id_api_version)

    # Forward any configured keystone v3 domain ids.
    for ks_obj in ('user', 'project'):
        domain_attr = 'os_%s_domain_id' % ks_obj
        if hasattr(self, domain_attr):
            flags += ' --os-%(ks_obj)s-domain-id %(value)s' % {
                'ks_obj': ks_obj,
                'value': getattr(self, domain_attr)
            }
    return self.client.cmd_with_auth(
        'openstack', action, flags, params, merge_stderr=merge_stderr)
def ironic(self, action, flags='', params='', parse=True):
    """Run an ironic command and optionally parse its table output.

    :param action: the cli command to run using Ironic
    :type action: string
    :param flags: any optional cli flags to use
    :type flags: string
    :param params: any optional positional args to use
    :type params: string
    :param parse: return parsed list or raw output
    :type parse: bool
    """
    raw = self._ironic(action=action, flags=flags, params=params)
    if not parse:
        return raw
    return self.parser.listing(raw)
def get_table_headers(self, action, flags='', params=''):
    """Return the header row of the table printed by the given command."""
    raw = self._ironic(action=action, flags=flags, params=params)
    parsed_table = self.parser.table(raw)
    return parsed_table['headers']
def assertTableHeaders(self, field_names, table_headers):
    """Assert the two header collections contain the same names.

    :param field_names: field names from the output table of the cmd
    :param table_headers: table headers output from cmd
    """
    expected = sorted(field_names)
    actual = sorted(table_headers)
    self.assertEqual(expected, actual)
def assertNodeStates(self, node_show, node_show_states):
    """Assert node-show-states output agrees with node-show output.

    :param node_show: output from node-show cmd
    :param node_show_states: output from node-show-states cmd
    """
    for field in node_show_states:
        self.assertEqual(node_show_states[field], node_show[field])
def assertNodeValidate(self, node_validate):
    """Assert that no interface reported a failed validation.

    :param node_validate: output from node-validate cmd
    """
    results = [row['Result'] for row in node_validate]
    self.assertNotIn('False', results)
def delete_node(self, node_id):
    """Delete node method works only with fake driver.

    :param node_id: node uuid
    :raises: CommandFailed exception when command fails to delete a node
    """
    if utils.get_object(self.list_nodes(), node_id):
        node = self.show_node(node_id)
        # A node must be 'available' and powered off before deletion.
        if node['provision_state'] != 'available':
            self.ironic('node-set-provision-state',
                        params='{0} deleted'.format(node_id))
        if node['power_state'] not in ('None', 'off'):
            self.ironic('node-set-power-state',
                        params='{0} off'.format(node_id))
        self.ironic('node-delete', params=node_id)

        if node_id in self.get_nodes_uuids_from_node_list():
            self.fail('Ironic node {0} has not been deleted!'
                      .format(node_id))
def create_node(self, driver='fake', params=''):
    """Create a node with the given driver and register its cleanup."""
    output = self.ironic('node-create',
                         params='--driver {0} {1}'.format(driver, params))
    if not output:
        self.fail('Ironic node has not been created!')

    node = utils.get_dict_from_output(output)
    self.addCleanup(self.delete_node, node['uuid'])
    return node
def show_node(self, node_id, params=''):
    """Return the node-show output for *node_id* as a dict."""
    output = self.ironic('node-show',
                         params='{0} {1}'.format(node_id, params))
    return utils.get_dict_from_output(output)
def list_nodes(self, params=''):
    """Return the parsed node-list output."""
    return self.ironic('node-list', params=params)
def update_node(self, node_id, params):
    """Update a node and return the updated node as a dict."""
    output = self.ironic('node-update',
                         params='{0} {1}'.format(node_id, params))
    return utils.get_dict_from_output(output)
def get_nodes_uuids_from_node_list(self):
    """Return the UUID column of the node-list output."""
    return [row['UUID'] for row in self.list_nodes()]
def show_node_states(self, node_id):
    """Return the node-show-states output for *node_id* as a dict."""
    output = self.ironic('node-show-states', params=node_id)
    return utils.get_dict_from_output(output)
def set_node_maintenance(self, node_id, maintenance_mode, params=''):
    """Set or unset maintenance mode on the given node."""
    args = '{0} {1} {2}'.format(node_id, maintenance_mode, params)
    self.ironic('node-set-maintenance', params=args)
def set_node_power_state(self, node_id, power_state, params=''):
    """Set the power state of the given node."""
    args = '{0} {1} {2}'.format(node_id, power_state, params)
    self.ironic('node-set-power-state', params=args)
def set_node_provision_state(self, node_id, provision_state, params=''):
    """Set the provision state of the given node."""
    args = '{0} {1} {2}'.format(node_id, provision_state, params)
    self.ironic('node-set-provision-state', params=args)
def validate_node(self, node_id):
    """Return the parsed node-validate output for *node_id*."""
    return self.ironic('node-validate', params=node_id)
def list_node_chassis(self, chassis_uuid, params=''):
    """List the nodes that belong to the given chassis."""
    return self.ironic('chassis-node-list',
                       params='{0} {1}'.format(chassis_uuid, params))
def get_nodes_uuids_from_chassis_node_list(self, chassis_uuid):
    """Return the UUIDs of nodes attached to the given chassis."""
    return [row['UUID'] for row in self.list_node_chassis(chassis_uuid)]
def list_driver(self, params=''):
    """Return the parsed driver-list output."""
    return self.ironic('driver-list', params=params)
def show_driver(self, driver_name):
    """Return the driver-show output for *driver_name* as a dict."""
    output = self.ironic('driver-show', params=driver_name)
    return utils.get_dict_from_output(output)
def properties_driver(self, driver_name):
    """Return the parsed driver-properties output for *driver_name*."""
    return self.ironic('driver-properties', params=driver_name)
def get_drivers_names(self):
    """Return the driver names shown by driver-list."""
    return [row['Supported driver(s)'] for row in self.list_driver()]
def delete_chassis(self, chassis_id, ignore_exceptions=False):
    """Delete a chassis, optionally swallowing CLI failures (for cleanup)."""
    try:
        self.ironic('chassis-delete', params=chassis_id)
    except exceptions.CommandFailed:
        if not ignore_exceptions:
            raise
def get_chassis_uuids_from_chassis_list(self):
    """Return the UUID column of the chassis-list output."""
    return [row['UUID'] for row in self.list_chassis()]
def create_chassis(self, params=''):
    """Create a chassis and register a best-effort cleanup."""
    output = self.ironic('chassis-create', params=params)
    if not output:
        self.fail('Ironic chassis has not been created!')

    chassis = utils.get_dict_from_output(output)
    self.addCleanup(self.delete_chassis,
                    chassis['uuid'],
                    ignore_exceptions=True)
    return chassis
def list_chassis(self, params=''):
    """Return the parsed chassis-list output."""
    return self.ironic('chassis-list', params=params)
def show_chassis(self, chassis_id, params=''):
    """Return the chassis-show output for *chassis_id* as a dict."""
    output = self.ironic('chassis-show',
                         params='{0} {1}'.format(chassis_id, params))
    return utils.get_dict_from_output(output)
def update_chassis(self, chassis_id, operation, params=''):
    """Update a chassis and return the updated chassis as a dict."""
    output = self.ironic(
        'chassis-update',
        params='{0} {1} {2}'.format(chassis_id, operation, params))
    return utils.get_dict_from_output(output)
def delete_port(self, port_id, ignore_exceptions=False):
    """Delete a port, optionally swallowing CLI failures (for cleanup)."""
    try:
        self.ironic('port-delete', params=port_id)
    except exceptions.CommandFailed:
        if not ignore_exceptions:
            raise
def create_port(self, node_id, mac_address=None, flags='', params=''):
    """Create a port on the given node and return it as a dict.

    A random MAC address is generated when none is supplied.
    """
    if mac_address is None:
        mac_address = data_utils.rand_mac_address()

    output = self.ironic('port-create',
                         flags=flags,
                         params='--address {0} --node {1} {2}'
                         .format(mac_address, node_id, params))
    if not output:
        self.fail('Ironic port has not been created!')

    return utils.get_dict_from_output(output)
def list_ports(self, params=''):
    """Return the parsed port-list output."""
    return self.ironic('port-list', params=params)
def show_port(self, port_id, params=''):
    """Return the port-show output for *port_id* as a dict."""
    output = self.ironic('port-show', params='{0} {1}'
                         .format(port_id, params))
    return utils.get_dict_from_output(output)
def get_uuids_from_port_list(self):
    """Return the UUID column of the port-list output."""
    return [row['UUID'] for row in self.list_ports()]
def update_port(self, port_id, operation, flags='', params=''):
    """Update a port and return the updated port as a dict."""
    output = self.ironic('port-update',
                         flags=flags,
                         params='{0} {1} {2}'
                         .format(port_id, operation, params))
    return utils.get_dict_from_output(output)
def create_portgroup(self, node_id, params=''):
    """Create a new portgroup."""
    output = self.ironic('portgroup-create',
                         flags=self.pg_api_ver,
                         params='--node {0} {1}'.format(node_id, params))
    if not output:
        self.fail('Ironic portgroup failed to create!')

    portgroup = utils.get_dict_from_output(output)
    self.addCleanup(self.delete_portgroup, portgroup['uuid'],
                    ignore_exceptions=True)
    return portgroup
def delete_portgroup(self, portgroup_id, ignore_exceptions=False):
    """Delete a port group."""
    try:
        self.ironic('portgroup-delete',
                    flags=self.pg_api_ver,
                    params=portgroup_id)
    except exceptions.CommandFailed:
        if not ignore_exceptions:
            raise
def list_portgroups(self, params=''):
    """List the port groups."""
    return self.ironic('portgroup-list',
                       flags=self.pg_api_ver,
                       params=params)
def show_portgroup(self, portgroup_id, params=''):
    """Show detailed information about a port group."""
    output = self.ironic('portgroup-show',
                         flags=self.pg_api_ver,
                         params='{0} {1}'.format(portgroup_id, params))
    return utils.get_dict_from_output(output)
def update_portgroup(self, portgroup_id, op, params=''):
    """Update information about a port group."""
    output = self.ironic('portgroup-update',
                         flags=self.pg_api_ver,
                         params='{0} {1} {2}'
                         .format(portgroup_id, op, params))
    return utils.get_dict_from_output(output)
def get_portgroup_uuids_from_portgroup_list(self):
    """Get UUIDs from list of port groups."""
    return [row['UUID'] for row in self.list_portgroups()]
def portgroup_port_list(self, portgroup_id, params=''):
    """List the ports associated with a port group."""
    return self.ironic('portgroup-port-list', flags=self.pg_api_ver,
                       params='{0} {1}'.format(portgroup_id, params))
@ -1,49 +0,0 @@
|
||||
#!/bin/bash -xe

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# This script is executed inside post_test_hook function in devstack gate.

# Convert the most recent testr run into subunit + HTML artifacts under
# $BASE/logs so the gate can publish them.
function generate_testr_results {
    if [ -f .testrepository/0 ]; then
        sudo .tox/functional/bin/testr last --subunit > $WORKSPACE/testrepository.subunit
        sudo mv $WORKSPACE/testrepository.subunit $BASE/logs/testrepository.subunit
        sudo /usr/os-testr-env/bin/subunit2html $BASE/logs/testrepository.subunit $BASE/logs/testr_results.html
        sudo gzip -9 $BASE/logs/testrepository.subunit
        sudo gzip -9 $BASE/logs/testr_results.html
        sudo chown jenkins:jenkins $BASE/logs/testrepository.subunit.gz $BASE/logs/testr_results.html.gz
        sudo chmod a+r $BASE/logs/testrepository.subunit.gz $BASE/logs/testr_results.html.gz
    fi
}

export IRONICCLIENT_DIR="$BASE/new/python-ironicclient"

# The functional env runs as jenkins; make the tree writable for it.
sudo chown -R jenkins:stack $IRONICCLIENT_DIR

cd $IRONICCLIENT_DIR

# Run tests
echo "Running ironicclient functional test suite"
# Disable errexit so a test failure still reaches artifact collection.
set +e

# Only admin credentials needed for ironic api
source $BASE/new/devstack/openrc admin admin

# Preserve env for OS_ credentials
sudo -E -H -u jenkins ./tools/run_functional.sh
EXIT_CODE=$?
set -e

# Collect and parse result
generate_testr_results
exit $EXIT_CODE
@ -1,286 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
|
||||
from tempest.lib.common.utils import data_utils
|
||||
from tempest.lib import exceptions
|
||||
|
||||
from ironicclient.tests.functional import base
|
||||
|
||||
|
||||
class TestCase(base.FunctionalTestBase):
    """Base class for OSC baremetal functional tests.

    Wraps ``openstack baremetal`` CLI invocations in helpers that return
    parsed JSON objects and register resource cleanups.
    """

    def openstack(self, *args, **kwargs):
        """Execute an OpenStack Client baremetal command."""
        return self._ironic_osc(*args, **kwargs)

    def get_opts(self, fields=None, output_format='json'):
        """Get options for OSC output fields format.

        :param List fields: List of fields to get
        :param String output_format: Select output format
        :return: String of formatted options
        """
        if not fields:
            return ' -f {0}'.format(output_format)
        return ' -f {0} {1}'.format(output_format,
                                    ' '.join(['-c ' + it for it in fields]))

    @staticmethod
    def construct_cmd(*parts):
        """Join the given parts into a single space-separated command."""
        return ' '.join(str(x) for x in parts)

    def node_create(self, driver='fake', name=None, params=''):
        """Create baremetal node and add cleanup.

        :param String driver: Driver for a new node
        :param String name: Name for a new node
        :param String params: Additional args and kwargs
        :return: JSON object of created node
        """
        if not name:
            name = data_utils.rand_name('baremetal')

        opts = self.get_opts()
        output = self.openstack('baremetal node create {0} '
                                '--driver {1} --name {2} {3}'
                                .format(opts, driver, name, params))
        # BUG FIX: validate the raw output *before* json.loads(); the
        # original parsed first, so empty output raised ValueError instead
        # of reporting the intended test failure.
        if not output:
            self.fail('Baremetal node has not been created!')

        node = json.loads(output)
        self.addCleanup(self.node_delete, node['uuid'], True)
        return node

    def node_list(self, fields=None, params=''):
        """List baremetal nodes.

        :param List fields: List of fields to show
        :param String params: Additional kwargs
        :return: list of JSON node objects
        """
        opts = self.get_opts(fields=fields)
        output = self.openstack('baremetal node list {0} {1}'
                                .format(opts, params))
        return json.loads(output)

    def node_show(self, identifier, fields=None, params=''):
        """Show specified baremetal node.

        :param String identifier: Name or UUID of the node
        :param List fields: List of fields to show
        :param List params: Additional kwargs
        :return: JSON object of node
        """
        opts = self.get_opts(fields)
        output = self.openstack('baremetal node show {0} {1} {2}'
                                .format(opts, identifier, params))
        return json.loads(output)

    def node_delete(self, identifier, ignore_exceptions=False):
        """Try to delete baremetal node by name or UUID.

        :param String identifier: Name or UUID of the node
        :param Bool ignore_exceptions: Ignore exception (needed for cleanUp)
        :return: raw values output
        :raise: CommandFailed exception when command fails to delete a node
        """
        try:
            return self.openstack('baremetal node delete {0}'
                                  .format(identifier))
        except exceptions.CommandFailed:
            if not ignore_exceptions:
                raise

    def port_create(self, node_id, mac_address=None, params=''):
        """Create baremetal port and add cleanup.

        :param String node_id: baremetal node UUID
        :param String mac_address: MAC address for port
        :param String params: Additional args and kwargs
        :return: JSON object of created port
        """
        if not mac_address:
            mac_address = data_utils.rand_mac_address()

        opts = self.get_opts()
        output = self.openstack('baremetal port create {0} '
                                '--node {1} {2} {3}'
                                .format(opts, node_id, mac_address, params))
        # Consistent with node_create: check raw output before parsing.
        if not output:
            self.fail('Baremetal port has not been created!')
        port = json.loads(output)
        self.addCleanup(self.port_delete, port['uuid'], True)
        return port

    def port_list(self, fields=None, params=''):
        """List baremetal ports.

        :param List fields: List of fields to show
        :param String params: Additional kwargs
        :return: list of JSON port objects
        """
        opts = self.get_opts(fields=fields)
        output = self.openstack('baremetal port list {0} {1}'
                                .format(opts, params))
        return json.loads(output)

    def port_show(self, uuid, fields=None, params=''):
        """Show specified baremetal port.

        :param String uuid: UUID of the port
        :param List fields: List of fields to show
        :param List params: Additional kwargs
        :return: JSON object of port
        """
        opts = self.get_opts(fields)
        output = self.openstack('baremetal port show {0} {1} {2}'
                                .format(opts, uuid, params))
        return json.loads(output)

    def port_delete(self, uuid, ignore_exceptions=False):
        """Try to delete baremetal port by UUID.

        :param String uuid: UUID of the port
        :param Bool ignore_exceptions: Ignore exception (needed for cleanUp)
        :return: raw values output
        :raise: CommandFailed exception when command fails to delete a port
        """
        try:
            return self.openstack('baremetal port delete {0}'
                                  .format(uuid))
        except exceptions.CommandFailed:
            if not ignore_exceptions:
                raise

    def port_group_list(self, fields=None, params=''):
        """List baremetal port groups.

        :param List fields: List of fields to show
        :param String params: Additional kwargs
        :return: JSON object of port group list
        """
        opts = self.get_opts(fields=fields)
        output = self.openstack('baremetal port group list {0} {1}'
                                .format(opts, params))
        return json.loads(output)

    def port_group_create(self, node_id, name=None, params=''):
        """Create baremetal port group.

        :param String node_id: baremetal node UUID
        :param String name: port group name
        :param String params: Additional args and kwargs
        :return: JSON object of created port group
        """
        if not name:
            name = data_utils.rand_name('port_group')

        opts = self.get_opts()
        output = self.openstack(
            'baremetal port group create {0} --node {1} --name {2} {3}'
            .format(opts, node_id, name, params))
        # Consistent with node_create: check raw output before parsing.
        if not output:
            self.fail('Baremetal port group has not been created!')
        port_group = json.loads(output)

        self.addCleanup(self.port_group_delete, port_group['uuid'],
                        params=params, ignore_exceptions=True)
        return port_group

    def port_group_delete(self, identifier, params='',
                          ignore_exceptions=False):
        """Try to delete baremetal port group by Name or UUID.

        :param String identifier: Name or UUID of the port group
        :param String params: temporary arg to pass api version.
        :param Bool ignore_exceptions: Ignore exception (needed for cleanUp)
        :return: raw values output
        :raise: CommandFailed exception if not ignore_exceptions
        """
        try:
            return self.openstack('baremetal port group delete {0} {1}'
                                  .format(identifier, params))
        except exceptions.CommandFailed:
            if not ignore_exceptions:
                raise

    def port_group_show(self, identifier, fields=None, params=''):
        """Show specified baremetal port group.

        :param String identifier: Name or UUID of the port group
        :param List fields: List of fields to show
        :param List params: Additional kwargs
        :return: JSON object of port group
        """
        opts = self.get_opts(fields)
        output = self.openstack('baremetal port group show {0} {1} {2}'
                                .format(identifier, opts, params))
        return json.loads(output)

    def chassis_create(self, params=''):
        """Create baremetal chassis and add cleanup.

        :param String params: Additional args and kwargs
        :return: JSON object of created chassis
        """
        opts = self.get_opts()
        output = self.openstack('baremetal chassis create {0} {1}'
                                .format(opts, params))
        # Consistent with node_create: check raw output before parsing.
        if not output:
            self.fail('Baremetal chassis has not been created!')
        chassis = json.loads(output)
        self.addCleanup(self.chassis_delete, chassis['uuid'], True)

        return chassis

    def chassis_delete(self, uuid, ignore_exceptions=False):
        """Try to delete baremetal chassis by UUID.

        :param String uuid: UUID of the chassis
        :param Bool ignore_exceptions: Ignore exception (needed for cleanUp)
        :return: raw values output
        :raise: CommandFailed exception when command fails to delete a chassis
        """
        try:
            return self.openstack('baremetal chassis delete {0}'
                                  .format(uuid))
        except exceptions.CommandFailed:
            if not ignore_exceptions:
                raise

    def chassis_list(self, fields=None, params=''):
        """List baremetal chassis.

        :param List fields: List of fields to show
        :param String params: Additional kwargs
        :return: list of JSON chassis objects
        """
        opts = self.get_opts(fields=fields)
        output = self.openstack('baremetal chassis list {0} {1}'
                                .format(opts, params))
        return json.loads(output)

    def chassis_show(self, uuid, fields=None, params=''):
        """Show specified baremetal chassis.

        :param String uuid: UUID of the chassis
        :param List fields: List of fields to show
        :param List params: Additional kwargs
        :return: JSON object of chassis
        """
        opts = self.get_opts(fields)
        output = self.openstack('baremetal chassis show {0} {1} {2}'
                                .format(opts, uuid, params))
        return json.loads(output)
@ -1,86 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironicclient.tests.functional.osc.v1 import base
|
||||
|
||||
|
||||
class BaremetalChassisTests(base.TestCase):
    """Functional tests for baremetal chassis commands."""

    def setUp(self):
        super(BaremetalChassisTests, self).setUp()
        self.chassis = self.chassis_create()

    def test_list(self):
        """Check baremetal chassis list command.

        Test steps:
        1) Create baremetal chassis in setUp.
        2) List baremetal chassis.
        3) Check chassis description and UUID in chassis list.
        """
        listing = self.chassis_list()
        uuids = [row['UUID'] for row in listing]
        descriptions = [row['Description'] for row in listing]
        self.assertIn(self.chassis['uuid'], uuids)
        self.assertIn(self.chassis['description'], descriptions)

    def test_show(self):
        """Check baremetal chassis show command.

        Test steps:
        1) Create baremetal chassis in setUp.
        2) Show baremetal chassis.
        3) Check chassis in chassis show.
        """
        shown = self.chassis_show(self.chassis['uuid'])
        self.assertEqual(self.chassis['uuid'], shown['uuid'])
        self.assertEqual(self.chassis['description'], shown['description'])

    def test_delete(self):
        """Check baremetal chassis delete command.

        Test steps:
        1) Create baremetal chassis in setUp.
        2) Delete baremetal chassis by UUID.
        3) Check that chassis deleted successfully.
        """
        output = self.chassis_delete(self.chassis['uuid'])
        self.assertIn('Deleted chassis {0}'.format(self.chassis['uuid']),
                      output)
        self.assertNotIn(self.chassis['uuid'], self.chassis_list(['UUID']))

    def test_set_unset_extra(self):
        """Check baremetal chassis set and unset commands.

        Test steps:
        1) Create baremetal chassis in setUp.
        2) Set extra data for chassis.
        3) Check that baremetal chassis extra data was set.
        4) Unset extra data for chassis.
        5) Check that baremetal chassis extra data was unset.
        """
        extra_key = 'ext'
        extra_value = 'testdata'

        self.openstack('baremetal chassis set --extra {0}={1} {2}'
                       .format(extra_key, extra_value, self.chassis['uuid']))
        shown = self.chassis_show(self.chassis['uuid'], ['extra'])
        self.assertEqual(extra_value, shown['extra'][extra_key])

        self.openstack('baremetal chassis unset --extra {0} {1}'
                       .format(extra_key, self.chassis['uuid']))
        shown = self.chassis_show(self.chassis['uuid'], ['extra'])
        self.assertNotIn(extra_key, shown['extra'])
@ -1,199 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import ddt
|
||||
from tempest.lib.common.utils import data_utils
|
||||
|
||||
from ironicclient.tests.functional.osc.v1 import base
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class BaremetalNodeTests(base.TestCase):
|
||||
"""Functional tests for baremetal node commands."""
|
||||
|
||||
def setUp(self):
    """Create one baremetal node for every test."""
    super(BaremetalNodeTests, self).setUp()
    self.node = self.node_create()
def test_warning_version_not_specified(self):
    """Test API version warning is printed when API version unspecified.

    A warning will appear for any invocation of the baremetal OSC plugin
    without --os-baremetal-api-version specified. It's tested with a simple
    node list command here.
    """
    output = self.openstack('baremetal node list', merge_stderr=True)
    self.assertIn('the default will be the latest API version', output)
def test_no_warning_version_specified(self):
    """Test API version warning is not printed when API version specified.

    This warning should not appear when a user specifies the ironic API
    version to use.
    """
    output = self.openstack('baremetal --os-baremetal-api-version=1.9 node'
                            ' list', merge_stderr=True)
    self.assertNotIn('the default will be the latest API version', output)
def test_create_name_uuid(self):
    """Check baremetal node create command with name and UUID.

    Test steps:
    1) Create baremetal node in setUp.
    2) Create one more baremetal node explicitly
    with specified name and UUID.
    3) Check that node successfully created.
    """
    node_uuid = data_utils.rand_uuid()
    node_name = data_utils.rand_name('baremetal-node')

    created = self.node_create(name=node_name,
                               params='--uuid {0}'.format(node_uuid))
    self.assertEqual(created['uuid'], node_uuid)
    self.assertEqual(created['name'], node_name)
    self.assertEqual(created['driver'], 'fake')
    self.assertEqual(created['maintenance'], False)

    listing = self.node_list()
    self.assertIn(node_uuid, [row['UUID'] for row in listing])
    self.assertIn(node_name, [row['Name'] for row in listing])
@ddt.data('name', 'uuid')
def test_delete(self, key):
    """Check baremetal node delete command with name/UUID argument.

    Test steps:
    1) Create baremetal node in setUp.
    2) Delete baremetal node by name/UUID.
    3) Check that node deleted successfully.
    """
    output = self.node_delete(self.node[key])
    self.assertIn('Deleted node {0}'.format(self.node[key]), output)

    listing = self.node_list()
    self.assertNotIn(self.node['name'], [row['Name'] for row in listing])
    self.assertNotIn(self.node['uuid'], [row['UUID'] for row in listing])
def test_list(self):
    """Check baremetal node list command.

    Test steps:
    1) Create baremetal node in setUp.
    2) List baremetal nodes.
    3) Check node name in nodes list.
    """
    listed = self.node_list()
    names = [entry['Name'] for entry in listed]
    uuids = [entry['UUID'] for entry in listed]
    self.assertIn(self.node['name'], names)
    self.assertIn(self.node['uuid'], uuids)
|
||||
|
||||
@ddt.data('name', 'uuid')
def test_set(self, key):
    """Check baremetal node set command calling it by name/UUID.

    Test steps:
    1) Create baremetal node in setUp.
    2) Set another name for node calling it by name/UUID.
    3) Check that baremetal node name was changed.
    """
    new_name = data_utils.rand_name('newnodename')
    self.openstack('baremetal node set --name {0} {1}'
                   .format(new_name, self.node[key]))
    # Re-read by UUID: the old name is no longer valid after the rename.
    show_prop = self.node_show(self.node['uuid'], ['name'])
    self.assertEqual(new_name, show_prop['name'])
|
||||
|
||||
@ddt.data('name', 'uuid')
def test_unset(self, key):
    """Check baremetal node unset command calling it by node name/UUID.

    Test steps:
    1) Create baremetal node in setUp.
    2) Unset name of baremetal node calling it by node name/UUID.
    3) Check that node has no more name.
    """
    self.openstack('baremetal node unset --name {0}'
                   .format(self.node[key]))
    # Re-read by UUID: the name was just removed.
    show_prop = self.node_show(self.node['uuid'], ['name'])
    self.assertIsNone(show_prop['name'])
|
||||
|
||||
@ddt.data('name', 'uuid')
def test_show(self, key):
    """Check baremetal node show command with name and UUID arguments.

    Test steps:
    1) Create baremetal node in setUp.
    2) Show baremetal node calling it with name and UUID arguments.
    3) Check name, uuid and driver in node show output.
    """
    # Show must return the same node whether looked up by name or UUID.
    node = self.node_show(self.node[key],
                          ['name', 'uuid', 'driver'])
    self.assertEqual(self.node['name'], node['name'])
    self.assertEqual(self.node['uuid'], node['uuid'])
    self.assertEqual(self.node['driver'], node['driver'])
|
||||
|
||||
def test_baremetal_node_maintenance_set_unset(self):
    """Check baremetal node maintenance set command.

    Test steps:
    1) Create baremetal node in setUp.
    2) Check maintenance status of fresh node is False.
    3) Set maintenance status for node.
    4) Check maintenance status of node is True.
    5) Unset maintenance status for node.
    6) Check maintenance status of node is False back.
    """
    # Fresh node: not in maintenance.
    show_prop = self.node_show(self.node['name'], ['maintenance'])
    self.assertFalse(show_prop['maintenance'])

    self.openstack('baremetal node maintenance set {0}'.
                   format(self.node['name']))

    show_prop = self.node_show(self.node['name'], ['maintenance'])
    self.assertTrue(show_prop['maintenance'])

    self.openstack('baremetal node maintenance unset {0}'.
                   format(self.node['name']))

    # Back to the initial state.
    show_prop = self.node_show(self.node['name'], ['maintenance'])
    self.assertFalse(show_prop['maintenance'])
|
||||
|
||||
def test_baremetal_node_maintenance_set_unset_reason(self):
    """Check baremetal node maintenance set command with --reason.

    Test steps:
    1) Create baremetal node in setUp.
    2) Check initial maintenance reason is None.
    3) Set maintenance status for node with reason.
    4) Check maintenance reason of node equals to expected value.
    Also check maintenance status.
    5) Unset maintenance status for node. Recheck maintenance status.
    6) Check maintenance reason is None. Recheck maintenance status.
    """
    reason = "Hardware maintenance."
    show_prop = self.node_show(self.node['name'],
                               ['maintenance_reason', 'maintenance'])
    self.assertIsNone(show_prop['maintenance_reason'])
    self.assertFalse(show_prop['maintenance'])

    # The reason is quoted so that its embedded space survives the shell.
    self.openstack("baremetal node maintenance set --reason '{0}' {1}".
                   format(reason, self.node['name']))

    show_prop = self.node_show(self.node['name'],
                               ['maintenance_reason', 'maintenance'])
    self.assertEqual(reason, show_prop['maintenance_reason'])
    self.assertTrue(show_prop['maintenance'])

    self.openstack('baremetal node maintenance unset {0}'.
                   format(self.node['name']))

    # Unsetting maintenance also clears the reason.
    show_prop = self.node_show(self.node['name'],
                               ['maintenance_reason', 'maintenance'])
    self.assertIsNone(show_prop['maintenance_reason'])
    self.assertFalse(show_prop['maintenance'])
|
@ -1,48 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import ddt
|
||||
import six
|
||||
from tempest.lib import exceptions
|
||||
|
||||
from ironicclient.tests.functional.osc.v1 import base
|
||||
|
||||
|
||||
@ddt.ddt
class BaremetalNodeCreateNegativeTests(base.TestCase):
    """Negative tests for node create command."""

    # NOTE: the previous setUp() only called super() and was removed as
    # redundant; the base class setUp runs regardless.

    @ddt.data(
        ('--uuid', '', 'expected one argument'),
        ('--uuid', '!@#$^*&%^', 'Expected a UUID'),
        ('--uuid', '0000 0000', 'unrecognized arguments'),
        ('--driver-info', '', 'expected one argument'),
        ('--driver-info', 'some info', 'unrecognized arguments'),
        ('--property', '', 'expected one argument'),
        ('--property', 'some property', 'unrecognized arguments'),
        ('--extra', '', 'expected one argument'),
        ('--extra', 'some extra', 'unrecognized arguments'),
        ('--name', '', 'expected one argument'),
        ('--name', 'some name', 'unrecognized arguments'),
        ('--network-interface', '', 'expected one argument'),
        ('--resource-class', '', 'expected one argument'))
    @ddt.unpack
    def test_baremetal_node_create(self, argument, value, ex_text):
        """Check that invalid node create arguments fail with the right error.

        Each ddt tuple is (argument, value, expected error fragment): the
        command must fail and its output must match *ex_text*.
        """
        base_cmd = 'baremetal node create --driver fake'
        command = self.construct_cmd(base_cmd, argument, value)
        six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
                              self.openstack, command)
|
@ -1,137 +0,0 @@
|
||||
# Copyright (c) 2017 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import six
|
||||
from tempest.lib import exceptions
|
||||
|
||||
from ironicclient.tests.functional.osc.v1 import base
|
||||
|
||||
|
||||
class TestNodeListFields(base.TestCase):
    """Functional tests for "baremetal node list" with --fields."""

    def setUp(self):
        super(TestNodeListFields, self).setUp()
        self.node = self.node_create()

    def _get_table_headers(self, raw_output):
        """Parse CLI table output and return its header row."""
        return self.parser.table(raw_output)['headers']

    def test_list_default_fields(self):
        """Test presence of default list table headers."""
        expected = ['UUID', 'Name', 'Instance UUID',
                    'Power State', 'Provisioning State', 'Maintenance']

        raw = self.openstack('baremetal node list')
        actual = self._get_table_headers(raw)

        # Order-insensitive comparison of the column set.
        self.assertEqual(set(expected), set(actual))

    def test_list_minimal_fields(self):
        """Only the requested --fields columns appear, in order."""
        fields = ['instance_uuid', 'name', 'uuid']
        expected = ['Instance UUID', 'Name', 'UUID']

        raw = self.openstack(
            'baremetal node list --fields {}'.format(' '.join(fields)))

        # Order matters here: columns follow the requested field order.
        self.assertEqual(expected, self._get_table_headers(raw))

    def test_list_no_fields(self):
        """--fields with no value is a usage error."""
        six.assertRaisesRegex(self, exceptions.CommandFailed,
                              'expected at least one argument',
                              self.openstack,
                              'baremetal node list --fields')

    def test_list_wrong_field(self):
        """An unknown field name is rejected."""
        six.assertRaisesRegex(self, exceptions.CommandFailed,
                              'invalid choice',
                              self.openstack,
                              'baremetal node list --fields ABC')
|
||||
|
||||
|
||||
class TestNodeShowFields(base.TestCase):
    """Functional tests for "baremetal node show" with --fields."""

    def setUp(self):
        super(TestNodeShowFields, self).setUp()
        self.node = self.node_create()
        # --fields on "node show" requires a recent Ironic API microversion.
        self.api_version = '--os-baremetal-api-version 1.20'

    def _get_table_rows(self, raw_output):
        """Return the first-column values (field names) of a show table."""
        table = self.parser.table(raw_output)
        rows = []
        for row in table['values']:
            rows.append(row[0])
        return rows

    def test_show_default_fields(self):
        """Default show output contains all the standard node fields."""
        rows = ['console_enabled',
                'clean_step',
                'created_at',
                'driver',
                'driver_info',
                'driver_internal_info',
                'extra',
                'inspection_finished_at',
                'inspection_started_at',
                'instance_info',
                'instance_uuid',
                'last_error',
                'maintenance',
                'maintenance_reason',
                'name',
                'power_state',
                'properties',
                'provision_state',
                'provision_updated_at',
                'reservation',
                'target_power_state',
                'target_provision_state',
                'updated_at',
                'uuid']
        node_show = self.openstack('baremetal node show {}'
                                   .format(self.node['uuid']))
        nodes_show_rows = self._get_table_rows(node_show)

        # Subset check: newer API versions may expose additional fields.
        self.assertTrue(set(rows).issubset(set(nodes_show_rows)))

    def test_show_minimal_fields(self):
        """Only the requested --fields appear in the show output."""
        rows = [
            'instance_uuid',
            'name',
            'uuid']

        node_show = self.openstack(
            'baremetal node show {} --fields {} {}'
            .format(self.node['uuid'], ' '.join(rows), self.api_version))

        nodes_show_rows = self._get_table_rows(node_show)
        self.assertEqual(set(rows), set(nodes_show_rows))

    def test_show_no_fields(self):
        """--fields without a value is a usage error."""
        # The api_version option directly follows --fields, leaving it
        # with no value.
        command = 'baremetal node show {} --fields {}'.format(
            self.node['uuid'], self.api_version)
        ex_text = 'expected at least one argument'
        six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
                              self.openstack, command)

    def test_show_wrong_field(self):
        """An unknown field name is rejected."""
        command = 'baremetal node show {} --fields ABC {}'.format(
            self.node['uuid'], self.api_version)
        ex_text = 'invalid choice'
        six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
                              self.openstack, command)
|
@ -1,82 +0,0 @@
|
||||
# Copyright (c) 2017 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import ddt
|
||||
import six
|
||||
from tempest.lib import exceptions
|
||||
|
||||
from ironicclient.tests.functional.osc.v1 import base
|
||||
|
||||
|
||||
@ddt.ddt
class BaremetalNodeNegativeTests(base.TestCase):
    """Negative tests for baremetal node commands."""

    def setUp(self):
        super(BaremetalNodeNegativeTests, self).setUp()
        self.node = self.node_create()

    # Each ddt tuple is (argument, value, expected error fragment).
    @ddt.data(
        ('', '', 'error: argument --driver is required'),
        ('--driver', 'wrongdriver',
         'No valid host was found. Reason: No conductor service '
         'registered which supports driver wrongdriver.')
    )
    @ddt.unpack
    def test_create_driver(self, argument, value, ex_text):
        """Negative test for baremetal node driver options."""
        base_cmd = 'baremetal node create'
        command = self.construct_cmd(base_cmd, argument, value)
        six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
                              self.openstack, command)

    def test_delete_no_node(self):
        """Test for baremetal node delete without node specified."""
        command = 'baremetal node delete'
        ex_text = 'error: too few arguments'
        six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
                              self.openstack, command)

    def test_list_wrong_argument(self):
        """Test for baremetal node list with wrong argument."""
        command = 'baremetal node list --wrong_arg'
        ex_text = 'error: unrecognized arguments: --wrong_arg'
        six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
                              self.openstack, command)

    @ddt.data(
        ('--property', '', 'error: too few arguments'),
        ('--property', 'prop', 'Attributes must be a list of PATH=VALUE')
    )
    @ddt.unpack
    def test_set_property(self, argument, value, ex_text):
        """Negative test for baremetal node set command options."""
        base_cmd = 'baremetal node set'
        command = self.construct_cmd(base_cmd, argument, value,
                                     self.node['uuid'])
        six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
                              self.openstack, command)

    @ddt.data(
        ('--property', '', 'error: too few arguments'),
        ('--property', 'prop', "Reason: can't remove non-existent object")
    )
    @ddt.unpack
    def test_unset_property(self, argument, value, ex_text):
        """Negative test for baremetal node unset command options."""
        base_cmd = 'baremetal node unset'
        command = self.construct_cmd(base_cmd, argument, value,
                                     self.node['uuid'])
        six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
                              self.openstack, command)
|
@ -1,58 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironicclient.tests.functional.osc.v1 import base
|
||||
|
||||
|
||||
class PowerStateTests(base.TestCase):
    """Functional tests for baremetal node power state commands."""

    def setUp(self):
        super(PowerStateTests, self).setUp()
        self.node = self.node_create()

    def _power_state(self):
        """Return the node's current power_state field."""
        prop = self.node_show(self.node['uuid'], ['power_state'])
        return prop['power_state']

    def test_off_reboot_on(self):
        """Reboot node from Power OFF state.

        Test steps:
        1) Create baremetal node in setUp.
        2) Set node Power State OFF as precondition.
        3) Call reboot command for baremetal node.
        4) Check node Power State ON in node properties.
        """
        uuid = self.node['uuid']
        self.openstack('baremetal node power off {0}'.format(uuid))
        self.assertEqual('power off', self._power_state())

        self.openstack('baremetal node reboot {0}'.format(uuid))
        self.assertEqual('power on', self._power_state())

    def test_on_reboot_on(self):
        """Reboot node from Power ON state.

        Test steps:
        1) Create baremetal node in setUp.
        2) Set node Power State ON as precondition.
        3) Call reboot command for baremetal node.
        4) Check node Power State ON in node properties.
        """
        uuid = self.node['uuid']
        self.openstack('baremetal node power on {0}'.format(uuid))
        self.assertEqual('power on', self._power_state())

        self.openstack('baremetal node reboot {0}'.format(uuid))
        self.assertEqual('power on', self._power_state())
|
@ -1,81 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironicclient.tests.functional.osc.v1 import base
|
||||
|
||||
|
||||
class ProvisionStateTests(base.TestCase):
    """Functional tests for baremetal node provision state commands."""

    def setUp(self):
        super(ProvisionStateTests, self).setUp()
        self.node = self.node_create()

    def test_deploy_rebuild_undeploy(self):
        """Deploy, rebuild and undeploy node.

        Test steps:
        1) Create baremetal node in setUp.
        2) Check initial "available" provision state.
        3) Set baremetal node "deploy" provision state.
        4) Check baremetal node provision_state field value is "active".
        5) Set baremetal node "rebuild" provision state.
        6) Check baremetal node provision_state field value is "active".
        7) Set baremetal node "undeploy" provision state.
        8) Check baremetal node provision_state field value is "available".
        """
        show_prop = self.node_show(self.node['uuid'], ["provision_state"])
        self.assertEqual("available", show_prop["provision_state"])

        # deploy
        self.openstack('baremetal node deploy {0}'.format(self.node['uuid']))
        show_prop = self.node_show(self.node['uuid'], ["provision_state"])
        self.assertEqual("active", show_prop["provision_state"])

        # rebuild: the node must end up "active" again
        self.openstack('baremetal node rebuild {0}'.format(self.node['uuid']))

        show_prop = self.node_show(self.node['uuid'], ["provision_state"])
        self.assertEqual("active", show_prop["provision_state"])

        # undeploy
        self.openstack('baremetal node undeploy {0}'.format(self.node['uuid']))

        show_prop = self.node_show(self.node['uuid'], ["provision_state"])
        self.assertEqual("available", show_prop["provision_state"])

    def test_manage_provide(self):
        """Manage and provide node back.

        Steps:
        1) Create baremetal node in setUp.
        2) Check initial "available" provision state.
        3) Set baremetal node "manage" provision state.
        4) Check baremetal node provision_state field value is "manageable".
        5) Set baremetal node "provide" provision state.
        6) Check baremetal node provision_state field value is "available".
        """

        show_prop = self.node_show(self.node['uuid'], ["provision_state"])
        self.assertEqual("available", show_prop["provision_state"])

        # manage
        self.openstack('baremetal node manage {0}'.format(self.node['uuid']))
        show_prop = self.node_show(self.node['uuid'], ["provision_state"])
        self.assertEqual("manageable", show_prop["provision_state"])

        # provide back
        self.openstack('baremetal node provide {0}'.format(self.node['uuid']))
        show_prop = self.node_show(self.node['uuid'], ["provision_state"])
        self.assertEqual("available", show_prop["provision_state"])
|
@ -1,106 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from ironicclient.tests.functional.osc.v1 import base
|
||||
|
||||
|
||||
class BaremetalPortTests(base.TestCase):
    """Functional tests for baremetal port commands."""

    def setUp(self):
        super(BaremetalPortTests, self).setUp()
        # Ports must belong to a node, so create the node first.
        self.node = self.node_create()
        self.port = self.port_create(self.node['uuid'])

    def test_list(self):
        """Check baremetal port list command.

        Test steps:
        1) Create baremetal port in setUp.
        2) List baremetal ports.
        3) Check port address and UUID in ports list.
        """
        port_list = self.port_list()
        self.assertIn(self.port['address'],
                      [port['Address'] for port in port_list])
        self.assertIn(self.port['uuid'],
                      [port['UUID'] for port in port_list])

    def test_show_uuid(self):
        """Check baremetal port show command with UUID.

        Test steps:
        1) Create baremetal port in setUp.
        2) Show baremetal port calling it by UUID.
        3) Check port fields in output.
        """
        port = self.port_show(self.port['uuid'])
        self.assertEqual(self.port['address'], port['address'])
        self.assertEqual(self.port['uuid'], port['uuid'])
        self.assertEqual(self.port['node_uuid'], self.node['uuid'])

    def test_show_addr(self):
        """Check baremetal port show command with address.

        Test steps:
        1) Create baremetal port in setUp.
        2) Show baremetal port calling it by address.
        3) Check port fields in output.
        """
        # Look the port up by MAC address instead of positional UUID.
        port = self.port_show(
            uuid='', params='--address {}'.format(self.port['address']))
        self.assertEqual(self.port['address'], port['address'])
        self.assertEqual(self.port['uuid'], port['uuid'])
        self.assertEqual(self.port['node_uuid'], self.node['uuid'])

    def test_delete(self):
        """Check baremetal port delete command.

        Test steps:
        1) Create baremetal port in setUp.
        2) Delete baremetal port by UUID.
        3) Check that port deleted successfully and not in list.
        """
        output = self.port_delete(self.port['uuid'])
        self.assertIn('Deleted port {0}'.format(self.port['uuid']), output)
        port_list = self.port_list()
        self.assertNotIn(self.port['address'],
                         [port['Address'] for port in port_list])
        self.assertNotIn(self.port['uuid'],
                         [port['UUID'] for port in port_list])

    def test_set_unset_extra(self):
        """Check baremetal port set and unset commands.

        Test steps:
        1) Create baremetal port in setUp.
        2) Set extra data for port.
        3) Check that baremetal port extra data was set.
        4) Unset extra data for port.
        5) Check that baremetal port extra data was unset.
        """
        extra_key = 'ext'
        extra_value = 'testdata'
        self.openstack('baremetal port set --extra {0}={1} {2}'
                       .format(extra_key, extra_value, self.port['uuid']))

        show_prop = self.port_show(self.port['uuid'], ['extra'])
        self.assertEqual(extra_value, show_prop['extra'][extra_key])

        # unset removes only the given key from the extra dict
        self.openstack('baremetal port unset --extra {0} {1}'
                       .format(extra_key, self.port['uuid']))

        show_prop = self.port_show(self.port['uuid'], ['extra'])
        self.assertNotIn(extra_key, show_prop['extra'])
|
@ -1,129 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import ddt
|
||||
|
||||
from tempest.lib.common.utils import data_utils
|
||||
|
||||
from ironicclient.tests.functional.osc.v1 import base
|
||||
|
||||
|
||||
@ddt.ddt
class BaremetalPortGroupTests(base.TestCase):
    """Functional tests for baremetal port group commands."""

    def setUp(self):
        super(BaremetalPortGroupTests, self).setUp()
        self.node = self.node_create()
        # Port groups require Ironic API >= 1.25.  The leading space lets
        # this string be appended directly to other parameter strings.
        self.api_version = ' --os-baremetal-api-version 1.25'
        self.port_group = self.port_group_create(self.node['uuid'],
                                                 params=self.api_version)

    def test_create_with_address(self):
        """Check baremetal port group create command with address argument.

        Test steps:
        1) Create baremetal port group in setUp.
        2) Create baremetal port group with specific address argument.
        3) Check address of created port group.
        """
        mac_address = data_utils.rand_mac_address()
        port_group = self.port_group_create(
            self.node['uuid'],
            params='{0} --address {1}'.format(self.api_version, mac_address))
        self.assertEqual(mac_address, port_group['address'])

    def test_list(self):
        """Check baremetal port group list command.

        Test steps:
        1) Create baremetal port group in setUp.
        2) List baremetal port groups.
        3) Check port group address, UUID and name in port groups list.
        """
        port_group_list = self.port_group_list(params=self.api_version)

        self.assertIn(self.port_group['uuid'],
                      [x['UUID'] for x in port_group_list])
        self.assertIn(self.port_group['name'],
                      [x['Name'] for x in port_group_list])

    @ddt.data('name', 'uuid')
    def test_delete(self, key):
        """Check baremetal port group delete command.

        Test steps:
        1) Create baremetal port group in setUp.
        2) Delete baremetal port group by name/UUID.
        3) Check that port group deleted successfully and not in list.
        """
        output = self.port_group_delete(self.port_group[key],
                                        params=self.api_version)
        self.assertEqual('Deleted port group {0}'
                         .format(self.port_group[key]), output.strip())

        port_group_list = self.port_group_list(params=self.api_version)

        self.assertNotIn(self.port_group['uuid'],
                         [x['UUID'] for x in port_group_list])
        self.assertNotIn(self.port_group['name'],
                         [x['Name'] for x in port_group_list])

    @ddt.data('name', 'uuid')
    def test_show(self, key):
        """Check baremetal port group show command.

        Test steps:
        1) Create baremetal port group in setUp.
        2) Show baremetal port group.
        3) Check name, uuid and address in port group show output.
        """
        port_group = self.port_group_show(
            self.port_group[key],
            ['name', 'uuid', 'address'],
            params=self.api_version)

        self.assertEqual(self.port_group['name'], port_group['name'])
        self.assertEqual(self.port_group['uuid'], port_group['uuid'])
        self.assertEqual(self.port_group['address'], port_group['address'])

    @ddt.data('name', 'uuid')
    def test_set_unset(self, key):
        """Check baremetal port group set and unset commands.

        Test steps:
        1) Create baremetal port group in setUp.
        2) Set extra data for port group.
        3) Check that baremetal port group extra data was set.
        4) Unset extra data for port group.
        5) Check that baremetal port group extra data was unset.
        """
        extra_key = 'ext'
        extra_value = 'testdata'
        self.openstack(
            'baremetal port group set --extra {0}={1} {2} {3}'
            .format(extra_key, extra_value, self.port_group[key],
                    self.api_version))

        show_prop = self.port_group_show(self.port_group[key], ['extra'],
                                         params=self.api_version)
        self.assertEqual(extra_value, show_prop['extra'][extra_key])

        self.openstack('baremetal port group unset --extra {0} {1} {2}'
                       .format(extra_key, self.port_group[key],
                               self.api_version))

        show_prop = self.port_group_show(self.port_group[key], ['extra'],
                                         params=self.api_version)
        self.assertNotIn(extra_key, show_prop['extra'])
|
@ -1,213 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import six
|
||||
from tempest.lib.common.utils import data_utils
|
||||
from tempest.lib import exceptions
|
||||
|
||||
from ironicclient.tests.functional import base
|
||||
|
||||
|
||||
class ChassisSanityTestIronicClient(base.FunctionalTestBase):
|
||||
"""Sanity tests for testing actions with Chassis.
|
||||
|
||||
Smoke test for the Ironic CLI commands which checks basic actions with
|
||||
chassis command like create, show, update, delete etc.
|
||||
"""
|
||||
def setUp(self):
|
||||
super(ChassisSanityTestIronicClient, self).setUp()
|
||||
self.chassis = self.create_chassis()
|
||||
|
||||
def test_chassis_create(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis
|
||||
2) check that chassis has been successfully created
|
||||
"""
|
||||
chassis_list_uuid = self.get_chassis_uuids_from_chassis_list()
|
||||
self.assertIn(self.chassis['uuid'], chassis_list_uuid)
|
||||
|
||||
def test_chassis_delete(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis
|
||||
2) check that chassis has been successfully created
|
||||
3) delete chassis
|
||||
4) check that chassis has been successfully deleted
|
||||
"""
|
||||
self.delete_chassis(self.chassis['uuid'])
|
||||
chassis_list_uuid = self.get_chassis_uuids_from_chassis_list()
|
||||
|
||||
self.assertNotIn(self.chassis['uuid'], chassis_list_uuid)
|
||||
|
||||
def test_chassis_show(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis
|
||||
2) check that chassis-show returns the same chassis UUID
|
||||
3) chassis-create
|
||||
"""
|
||||
chassis_show = self.show_chassis(self.chassis['uuid'])
|
||||
self.assertEqual(self.chassis['uuid'], chassis_show['uuid'])
|
||||
|
||||
def test_chassis_show_field(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis
|
||||
2) show chassis with fields uuid
|
||||
3) check that fields is exist
|
||||
"""
|
||||
fields = ['uuid']
|
||||
chassis_show = self.show_chassis(self.chassis['uuid'],
|
||||
params='--fields {0}'
|
||||
.format(*fields))
|
||||
self.assertTableHeaders(fields, chassis_show.keys())
|
||||
|
||||
def test_chassis_update(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis
|
||||
2) update chassis
|
||||
3) check that chassis has been successfully updated
|
||||
"""
|
||||
updated_chassis = self.update_chassis(
|
||||
self.chassis['uuid'], 'add', 'description=test-chassis')
|
||||
self.assertEqual('test-chassis', updated_chassis['description'])
|
||||
self.assertNotEqual(self.chassis['description'],
|
||||
updated_chassis['description'])
|
||||
|
||||
def test_chassis_node_list(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis in setUp()
|
||||
2) create 3 nodes
|
||||
3) update 2 nodes to be included in chassis
|
||||
4) check if 2 nodes are added to chassis
|
||||
5) check if 1 nodes isn't added to chassis
|
||||
"""
|
||||
node1 = self.create_node()
|
||||
node2 = self.create_node()
|
||||
|
||||
# This node is created to show that it won't be present
|
||||
# in the chassis-node-list output
|
||||
|
||||
node3 = self.create_node()
|
||||
updated_node1 = self.update_node(node1['uuid'],
|
||||
'add chassis_uuid={0}'
|
||||
.format(self.chassis['uuid']))
|
||||
updated_node2 = self.update_node(node2['uuid'],
|
||||
'add chassis_uuid={0}'
|
||||
.format(self.chassis['uuid']))
|
||||
nodes = [updated_node1['uuid'], updated_node2['uuid']]
|
||||
nodes.sort()
|
||||
nodes_uuids = self.get_nodes_uuids_from_chassis_node_list(
|
||||
self.chassis['uuid'])
|
||||
nodes_uuids.sort()
|
||||
self.assertEqual(nodes, nodes_uuids)
|
||||
self.assertNotIn(node3['uuid'], nodes_uuids)
|
||||
|
||||
|
||||
class ChassisNegativeTestsIronicClient(base.FunctionalTestBase):
|
||||
"""Negative tests for testing actions with Chassis.
|
||||
|
||||
Negative tests for the Ironic CLI commands which checks actions with
|
||||
chassis command like show, update, delete either using with arguments
|
||||
or without arguments.
|
||||
"""
|
||||
|
||||
def test_chassis_delete_without_arguments(self):
|
||||
"""Test step:
|
||||
|
||||
1) check that chassis-delete command without arguments
|
||||
triggers an exception
|
||||
"""
|
||||
ex_text = r'chassis-delete: error: too few arguments'
|
||||
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed,
|
||||
ex_text,
|
||||
self.delete_chassis, '')
|
||||
|
||||
def test_chassis_delete_with_incorrect_chassis_uuid(self):
|
||||
"""Test step:
|
||||
|
||||
1) check that deleting non-exist chassis triggers an exception
|
||||
triggers an exception
|
||||
"""
|
||||
uuid = data_utils.rand_uuid()
|
||||
ex_text = (r"Chassis {0} "
|
||||
r"could not be found. \(HTTP 404\)".format(uuid))
|
||||
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed,
|
||||
ex_text,
|
||||
self.delete_chassis,
|
||||
'{0}'.format(uuid))
|
||||
|
||||
def test_chassis_show_without_arguments(self):
|
||||
"""Test step:
|
||||
|
||||
1) check that chassis-show command without arguments
|
||||
triggers an exception
|
||||
"""
|
||||
ex_text = r'chassis-show: error: too few arguments'
|
||||
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed,
|
||||
ex_text,
|
||||
self.show_chassis, '')
|
||||
|
||||
def test_chassis_show_with_incorrect_chassis_uuid(self):
|
||||
"""Test step:
|
||||
|
||||
1) check that chassis-show command with incorrect chassis
|
||||
uuid triggers an exception
|
||||
"""
|
||||
uuid = data_utils.rand_uuid()
|
||||
ex_text = (r"Chassis {0} "
|
||||
r"could not be found. \(HTTP 404\)".format(uuid))
|
||||
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed,
|
||||
ex_text,
|
||||
self.show_chassis,
|
||||
'{0}'.format(uuid))
|
||||
|
||||
def test_chassis_update_without_arguments(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis
|
||||
2) check that chassis-update command without arguments
|
||||
triggers an exception
|
||||
"""
|
||||
ex_text = r'chassis-update: error: too few arguments'
|
||||
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed,
|
||||
ex_text,
|
||||
self.update_chassis,
|
||||
chassis_id='',
|
||||
operation='')
|
||||
|
||||
def test_chassis_update_with_incorrect_chassis_uuid(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis
|
||||
2) check that chassis-update command with incorrect arguments
|
||||
triggers an exception
|
||||
"""
|
||||
uuid = data_utils.rand_uuid()
|
||||
ex_text = r'chassis-update: error: too few arguments'
|
||||
|
||||
six.assertRaisesRegex(self,
|
||||
exceptions.CommandFailed,
|
||||
ex_text,
|
||||
self.update_chassis,
|
||||
chassis_id='{0}'.format(uuid),
|
||||
operation='')
|
@ -1,129 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import six
|
||||
from tempest.lib.common.utils import data_utils
|
||||
from tempest.lib import exceptions
|
||||
|
||||
from ironicclient.tests.functional import base
|
||||
|
||||
|
||||
class NegativeChassisCreateTestsIronicClient(base.FunctionalTestBase):
|
||||
"""Negative tests for testing chassis-create command.
|
||||
|
||||
Negative tests for the Ironic CLI commands which check actions with
|
||||
chassis-create command like create chassis without arguments or with
|
||||
incorrect arguments and check that correct error message raised.
|
||||
"""
|
||||
|
||||
error_msg = r'ironic chassis-create: error:'
|
||||
expected_msg = r'expected one argument'
|
||||
|
||||
def test_description_no_value(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis using -d argument without the value
|
||||
2) create chassis using --description argument without the value
|
||||
3) check that command using -d argument triggers an exception
|
||||
4) check that command with --description arg triggers an exception
|
||||
"""
|
||||
ex_text = (r'{0} argument -d/--description: {1}'
|
||||
.format(self.error_msg, self.expected_msg))
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis, '-d')
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis, '--description')
|
||||
|
||||
def test_metadata_extra_no_value(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis using -e argument without the value
|
||||
2) create chassis using --extra argument without the value
|
||||
3) check that command using -e argument triggers an exception
|
||||
4) check that command with --extra argument triggers an exception
|
||||
"""
|
||||
ex_text = (r'{0} argument -e/--extra: {1}'
|
||||
.format(self.error_msg, self.expected_msg))
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis, '-e')
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis, '--extra')
|
||||
|
||||
def test_specific_uuid_no_value(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis using -u argument without the value
|
||||
2) create chassis using --uuid argument without the value
|
||||
3) check that command using -u argument triggers an exception
|
||||
4) check that command with --uuid argument triggers an exception
|
||||
"""
|
||||
ex_text = (r'{0} argument -u/--uuid: {1}'
|
||||
.format(self.error_msg, self.expected_msg))
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis, '-u')
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis, '--uuid')
|
||||
|
||||
def test_invalid_description(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis with invalid description using -d argument
|
||||
2) create chassis with invalid description using --description arg
|
||||
3) check that command using -d argument triggers an exception
|
||||
4) check that command using --uuid argument triggers an exception
|
||||
"""
|
||||
description = '--'
|
||||
ex_text = (r'{0} argument -d/--description: {1}'
|
||||
.format(self.error_msg, self.expected_msg))
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis,
|
||||
params='-d {0}'.format(description))
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis,
|
||||
params='--description {0}'.format(description))
|
||||
|
||||
def test_invalid_metadata_extra(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis with invalid metadata using -e argument
|
||||
2) create chassis with invalid metadata using --extra argument
|
||||
3) check that command using -e argument triggers an exception
|
||||
4) check that command using --extra argument triggers an exception
|
||||
"""
|
||||
extra = "HelloWorld"
|
||||
ex_text = (r'{0} Attributes must be a list of PATH=VALUE'
|
||||
.format(self.error_msg))
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis,
|
||||
params='-e {0}'.format(extra))
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis,
|
||||
params='--extra {0}'.format(extra))
|
||||
|
||||
def test_invalid_specific_uuid(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create chassis with invalid specific uuid using -u argument
|
||||
2) create chassis with invalid specific uuid using --uuid argument
|
||||
3) check that command using -u argument triggers an exception
|
||||
4) check that command using --uuid argument triggers an exception
|
||||
"""
|
||||
invalid_uuid = data_utils.rand_uuid()[:-1]
|
||||
ex_text = r'Expected a UUID but received {0}'.format(invalid_uuid)
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis,
|
||||
params='-u {0}'.format(invalid_uuid))
|
||||
six.assertRaisesRegex(self, exceptions.CommandFailed, ex_text,
|
||||
self.create_chassis,
|
||||
params='--uuid {0}'.format(invalid_uuid))
|
@ -1,56 +0,0 @@
|
||||
# Copyright (c) 2015 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironicclient.tests.functional import base
|
||||
|
||||
|
||||
class DriverSanityTestIronicClient(base.FunctionalTestBase):
|
||||
"""Sanity tests for testing actions with driver.
|
||||
|
||||
Smoke test for the Ironic CLI commands which checks basic actions with
|
||||
driver command like driver-show, driver-properties.
|
||||
"""
|
||||
|
||||
def test_driver_show(self):
|
||||
"""Test steps:
|
||||
|
||||
1) get drivers names
|
||||
2) check that each driver exists in driver-show output
|
||||
"""
|
||||
drivers_names = self.get_drivers_names()
|
||||
for driver in drivers_names:
|
||||
driver_show = self.show_driver(driver)
|
||||
self.assertEqual(driver, driver_show['name'])
|
||||
|
||||
def test_driver_properties(self):
|
||||
"""Test steps:
|
||||
|
||||
1) get drivers names
|
||||
2) check that each driver has some properties
|
||||
"""
|
||||
drivers_names = self.get_drivers_names()
|
||||
for driver in drivers_names:
|
||||
driver_properties = self.properties_driver(driver)
|
||||
self.assertNotEqual([], [x['Property'] for x in driver_properties])
|
||||
|
||||
def test_driver_list(self):
|
||||
"""Test steps:
|
||||
|
||||
1) get list of drivers
|
||||
2) check that list of drivers is not empty
|
||||
"""
|
||||
driver = 'fake'
|
||||
available_drivers = self.get_drivers_names()
|
||||
self.assertTrue(len(available_drivers) > 0)
|
||||
self.assertIn(driver, available_drivers)
|
@ -1,77 +0,0 @@
|
||||
# Copyright (c) 2015 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironicclient.tests.functional import base
|
||||
|
||||
|
||||
class IronicClientHelp(base.FunctionalTestBase):
|
||||
"""Test for python-ironicclient help messages."""
|
||||
|
||||
def test_ironic_help(self):
|
||||
"""Check Ironic client main help message contents."""
|
||||
caption = ("Command-line interface to the "
|
||||
"OpenStack Bare Metal Provisioning API.")
|
||||
subcommands = {
|
||||
'bash-completion',
|
||||
'chassis-create',
|
||||
'chassis-delete',
|
||||
'chassis-list',
|
||||
'chassis-node-list',
|
||||
'chassis-show',
|
||||
'chassis-update',
|
||||
'driver-list',
|
||||
'driver-properties',
|
||||
'driver-show',
|
||||
'driver-vendor-passthru',
|
||||
'help',
|
||||
'node-create',
|
||||
'node-delete',
|
||||
'node-get-boot-device',
|
||||
'node-get-console',
|
||||
'node-get-supported-boot-devices',
|
||||
'node-list',
|
||||
'node-port-list',
|
||||
'node-set-boot-device',
|
||||
'node-set-console-mode',
|
||||
'node-set-maintenance',
|
||||
'node-set-power-state',
|
||||
'node-set-provision-state',
|
||||
'node-show',
|
||||
'node-show-states',
|
||||
'node-update',
|
||||
'node-validate',
|
||||
'node-vendor-passthru',
|
||||
'node-vif-attach',
|
||||
'node-vif-detach',
|
||||
'node-vif-list',
|
||||
'port-create',
|
||||
'port-delete',
|
||||
'port-list',
|
||||
'port-show',
|
||||
'port-update'
|
||||
}
|
||||
|
||||
output = self._ironic('help', flags='', params='')
|
||||
|
||||
self.assertIn(caption, output)
|
||||
for string in subcommands:
|
||||
self.assertIn(string, output)
|
||||
|
||||
def test_warning_on_api_version(self):
|
||||
result = self._ironic('help', merge_stderr=True)
|
||||
self.assertIn('You are using the default API version', result)
|
||||
|
||||
result = self._ironic('help', flags='--ironic-api-version 1.9',
|
||||
merge_stderr=True)
|
||||
self.assertNotIn('You are using the default API version', result)
|
@ -1,287 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
|
||||
import jsonschema
|
||||
from tempest.lib.common.utils import data_utils
|
||||
|
||||
from ironicclient.tests.functional import base
|
||||
|
||||
|
||||
def _is_valid_json(json_response, schema):
|
||||
"""Verify JSON is valid.
|
||||
|
||||
:param json_response: JSON response from CLI
|
||||
:type json_response: string
|
||||
:param schema: expected schema of response
|
||||
:type json_response: dictionary
|
||||
"""
|
||||
try:
|
||||
json_response = json.loads(json_response)
|
||||
jsonschema.validate(json_response, schema)
|
||||
except jsonschema.ValidationError:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class TestNodeJsonResponse(base.FunctionalTestBase):
|
||||
"""Test JSON responses for node commands."""
|
||||
|
||||
node_schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"target_power_state": {"type": ["string", "null"]},
|
||||
"extra": {"type": "object"},
|
||||
"last_error": {"type": ["string", "null"]},
|
||||
"updated_at": {"type": ["string", "null"]},
|
||||
"maintenance_reason": {"type": ["string", "null"]},
|
||||
"provision_state": {"type": "string"},
|
||||
"clean_step": {"type": "object"},
|
||||
"uuid": {"type": "string"},
|
||||
"console_enabled": {"type": "boolean"},
|
||||
"target_provision_state": {"type": ["string", "null"]},
|
||||
"raid_config": {"type": "string"},
|
||||
"provision_updated_at": {"type": ["string", "null"]},
|
||||
"maintenance": {"type": "boolean"},
|
||||
"target_raid_config": {"type": "string"},
|
||||
"inspection_started_at": {"type": ["string", "null"]},
|
||||
"inspection_finished_at": {"type": ["string", "null"]},
|
||||
"power_state": {"type": ["string", "null"]},
|
||||
"driver": {"type": "string"},
|
||||
"reservation": {"type": ["string", "null"]},
|
||||
"properties": {"type": "object"},
|
||||
"instance_uuid": {"type": ["string", "null"]},
|
||||
"name": {"type": ["string", "null"]},
|
||||
"driver_info": {"type": "object"},
|
||||
"created_at": {"type": "string"},
|
||||
"driver_internal_info": {"type": "object"},
|
||||
"chassis_uuid": {"type": ["string", "null"]},
|
||||
"instance_info": {"type": "object"}
|
||||
}
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestNodeJsonResponse, self).setUp()
|
||||
self.node = self.create_node()
|
||||
|
||||
def test_node_list_json(self):
|
||||
"""Test JSON response for nodes list."""
|
||||
schema = {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"instance_uuid": {"type": ["string", "null"]},
|
||||
"maintenance": {"type": "boolean"},
|
||||
"name": {"type": ["string", "null"]},
|
||||
"power_state": {"type": ["string", "null"]},
|
||||
"provision_state": {"type": "string"},
|
||||
"uuid": {"type": "string"}}}
|
||||
}
|
||||
response = self.ironic('node-list', flags='--json',
|
||||
params='', parse=False)
|
||||
self.assertTrue(_is_valid_json(response, schema))
|
||||
|
||||
def test_node_show_json(self):
|
||||
"""Test JSON response for node show."""
|
||||
response = self.ironic('node-show', flags='--json', params='{0}'
|
||||
.format(self.node['uuid']), parse=False)
|
||||
self.assertTrue(_is_valid_json(response, self.node_schema))
|
||||
|
||||
def test_node_validate_json(self):
|
||||
"""Test JSON response for node validation."""
|
||||
schema = {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"interface": {"type": ["string", "null"]},
|
||||
"result": {"type": "boolean"},
|
||||
"reason": {"type": ["string", "null"]}}}
|
||||
}
|
||||
response = self.ironic('node-validate', flags='--json',
|
||||
params='{0}'.format(self.node['uuid']),
|
||||
parse=False)
|
||||
self.assertTrue(_is_valid_json(response, schema))
|
||||
|
||||
def test_node_show_states_json(self):
|
||||
"""Test JSON response for node show states."""
|
||||
schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"target_power_state": {"type": ["string", "null"]},
|
||||
"target_provision_state": {"type": ["string", "null"]},
|
||||
"last_error": {"type": ["string", "null"]},
|
||||
"console_enabled": {"type": "boolean"},
|
||||
"provision_updated_at": {"type": ["string", "null"]},
|
||||
"power_state": {"type": ["string", "null"]},
|
||||
"provision_state": {"type": "string"}
|
||||
}
|
||||
}
|
||||
response = self.ironic('node-show-states', flags='--json',
|
||||
params='{0}'.format(self.node['uuid']),
|
||||
parse=False)
|
||||
self.assertTrue(_is_valid_json(response, schema))
|
||||
|
||||
def test_node_create_json(self):
|
||||
"""Test JSON response for node creation."""
|
||||
schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"uuid": {"type": "string"},
|
||||
"driver_info": {"type": "object"},
|
||||
"extra": {"type": "object"},
|
||||
"driver": {"type": "string"},
|
||||
"chassis_uuid": {"type": ["string", "null"]},
|
||||
"properties": {"type": "object"},
|
||||
"name": {"type": ["string", "null"]},
|
||||
}
|
||||
}
|
||||
node_name = 'nodejson'
|
||||
response = self.ironic('node-create', flags='--json',
|
||||
params='-d fake -n {0}'.format(node_name),
|
||||
parse=False)
|
||||
self.addCleanup(self.delete_node, node_name)
|
||||
self.assertTrue(_is_valid_json(response, schema))
|
||||
|
||||
def test_node_update_json(self):
|
||||
"""Test JSON response for node update."""
|
||||
node_name = data_utils.rand_name('test')
|
||||
response = self.ironic('node-update', flags='--json',
|
||||
params='{0} add name={1}'
|
||||
.format(self.node['uuid'], node_name),
|
||||
parse=False)
|
||||
self.assertTrue(_is_valid_json(response, self.node_schema))
|
||||
|
||||
|
||||
class TestDriverJsonResponse(base.FunctionalTestBase):
|
||||
"""Test JSON responses for driver commands."""
|
||||
|
||||
def test_driver_list_json(self):
|
||||
"""Test JSON response for drivers list."""
|
||||
schema = {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"hosts": {"type": "string"},
|
||||
}}
|
||||
}
|
||||
response = self.ironic('driver-list', flags='--json', parse=False)
|
||||
self.assertTrue(_is_valid_json(response, schema))
|
||||
|
||||
def test_driver_show_json(self):
|
||||
"""Test JSON response for driver show."""
|
||||
schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"hosts": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}}
|
||||
}
|
||||
}
|
||||
drivers_names = self.get_drivers_names()
|
||||
for driver in drivers_names:
|
||||
response = self.ironic('driver-show', flags='--json',
|
||||
params='{0}'.format(driver), parse=False)
|
||||
self.assertTrue(_is_valid_json(response, schema))
|
||||
|
||||
def test_driver_properties_json(self):
|
||||
"""Test JSON response for driver properties."""
|
||||
schema = {
|
||||
"type": "object",
|
||||
"additionalProperties": {"type": "string"}
|
||||
}
|
||||
drivers_names = self.get_drivers_names()
|
||||
for driver in drivers_names:
|
||||
response = self.ironic('driver-properties', flags='--json',
|
||||
params='{0}'.format(driver), parse=False)
|
||||
self.assertTrue(_is_valid_json(response, schema))
|
||||
|
||||
|
||||
class TestChassisJsonResponse(base.FunctionalTestBase):
|
||||
"""Test JSON responses for chassis commands."""
|
||||
|
||||
chassis_schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"uuid": {"type": "string"},
|
||||
"updated_at": {"type": ["string", "null"]},
|
||||
"created_at": {"type": "string"},
|
||||
"description": {"type": ["string", "null"]},
|
||||
"extra": {"type": "object"}}
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
super(TestChassisJsonResponse, self).setUp()
|
||||
self.chassis = self.create_chassis()
|
||||
|
||||
def test_chassis_list_json(self):
|
||||
"""Test JSON response for chassis list."""
|
||||
schema = {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"uuid": {"type": "string"},
|
||||
"description": {"type": ["string", "null"]}}
|
||||
}
|
||||
}
|
||||
response = self.ironic('chassis-list', flags='--json', parse=False)
|
||||
self.assertTrue(_is_valid_json(response, schema))
|
||||
|
||||
def test_chassis_show_json(self):
|
||||
"""Test JSON response for chassis show."""
|
||||
response = self.ironic('chassis-show', flags='--json',
|
||||
params='{0}'.format(self.chassis['uuid']),
|
||||
parse=False)
|
||||
self.assertTrue(_is_valid_json(response, self.chassis_schema))
|
||||
|
||||
def test_chassis_create_json(self):
|
||||
"""Test JSON response for chassis create."""
|
||||
response = self.ironic('chassis-create', flags='--json', parse=False)
|
||||
self.assertTrue(_is_valid_json(response, self.chassis_schema))
|
||||
|
||||
def test_chassis_update_json(self):
|
||||
"""Test JSON response for chassis update."""
|
||||
response = self.ironic(
|
||||
'chassis-update', flags='--json', params='{0} {1} {2}'.format(
|
||||
self.chassis['uuid'], 'add', 'description=test-chassis'),
|
||||
parse=False)
|
||||
self.assertTrue(_is_valid_json(response, self.chassis_schema))
|
||||
|
||||
def test_chassis_node_list_json(self):
|
||||
"""Test JSON response for chassis-node-list command."""
|
||||
schema = {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"instance_uuid": {"type": ["string", "null"]},
|
||||
"maintenance": {"type": "boolean"},
|
||||
"name": {"type": ["string", "null"]},
|
||||
"power_state": {"type": ["string", "null"]},
|
||||
"provision_state": {"type": "string"},
|
||||
"uuid": {"type": "string"}}}
|
||||
}
|
||||
self.node = self.create_node()
|
||||
self.update_node(self.node['uuid'], 'add chassis_uuid={0}'
|
||||
.format(self.chassis['uuid']))
|
||||
response = self.ironic('chassis-node-list', flags='--json',
|
||||
params='{0}'.format(self.chassis['uuid']),
|
||||
parse=False)
|
||||
self.assertTrue(_is_valid_json(response, schema))
|
@ -1,210 +0,0 @@
|
||||
# Copyright (c) 2015 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from tempest.lib.common.utils import data_utils
|
||||
|
||||
from ironicclient.tests.functional import base
|
||||
import ironicclient.tests.functional.utils as utils
|
||||
|
||||
|
||||
class NodeSanityTestIronicClient(base.FunctionalTestBase):
|
||||
"""Sanity tests for testing actions with Node.
|
||||
|
||||
Smoke test for the Ironic CLI commands which checks basic actions with
|
||||
node command like create, delete etc.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
super(NodeSanityTestIronicClient, self).setUp()
|
||||
self.node = self.create_node()
|
||||
|
||||
def test_node_create(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) check that node has been successfully created
|
||||
"""
|
||||
self.assertIn(self.node['uuid'], self.get_nodes_uuids_from_node_list())
|
||||
|
||||
def test_node_show(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) check that created node UUID equals to the one present
|
||||
in node-show output
|
||||
"""
|
||||
node_show = self.show_node(self.node['uuid'])
|
||||
self.assertEqual(self.node['uuid'], node_show['uuid'])
|
||||
|
||||
def test_node_show_field(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) show node with fields instance_uuid, driver, name, uuid
|
||||
3) check that only fields instance_uuid, driver, name,
|
||||
uuid are the output fields
|
||||
"""
|
||||
fields = ['instance_uuid', 'driver', 'name', 'uuid']
|
||||
node_show = self.show_node(self.node['uuid'],
|
||||
params='--fields %s' % ' '.join(fields))
|
||||
self.assertTableHeaders(fields, node_show.keys())
|
||||
|
||||
def test_node_delete(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) check that it was created
|
||||
3) delete node
|
||||
4) check that node has been successfully deleted
|
||||
"""
|
||||
self.assertIn(self.node['uuid'], self.get_nodes_uuids_from_node_list())
|
||||
self.delete_node(self.node['uuid'])
|
||||
self.assertNotIn(self.node['uuid'],
|
||||
self.get_nodes_uuids_from_node_list())
|
||||
|
||||
def test_node_update(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) update node name
|
||||
3) check that node name has been successfully updated
|
||||
"""
|
||||
node_name = data_utils.rand_name(prefix='test')
|
||||
updated_node = self.update_node(self.node['uuid'],
|
||||
'add name={0}'.format(node_name))
|
||||
self.assertEqual(node_name, updated_node['name'])
|
||||
|
||||
def test_node_set_console_mode(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) check that console_enabled is False
|
||||
3) set node console mode to True
|
||||
4) check that node console mode has been successfully updated
|
||||
"""
|
||||
node_show = self.show_node(self.node['uuid'])
|
||||
|
||||
self.assertEqual('False', node_show['console_enabled'])
|
||||
|
||||
self.ironic('node-set-console-mode',
|
||||
params='{0} true'.format(self.node['uuid']))
|
||||
node_show = self.show_node(self.node['uuid'])
|
||||
|
||||
self.assertEqual('True', node_show['console_enabled'])
|
||||
|
||||
def test_node_get_console(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) check console mode using node-show
|
||||
3) get console mode using node-get-console
|
||||
4) check that node-get-console value equals node-show value
|
||||
"""
|
||||
node_show = self.show_node(self.node['uuid'])
|
||||
node_get = self.ironic('node-get-console', params=self.node['uuid'])
|
||||
node_get = utils.get_dict_from_output(node_get)
|
||||
|
||||
self.assertEqual(node_show['console_enabled'],
|
||||
node_get['console_enabled'])
|
||||
|
||||
def test_node_set_maintenance(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) check that maintenance is False
|
||||
3) put node to maintenance
|
||||
4) check that node is in maintenance
|
||||
5) check that maintenance reason has been successfully updated
|
||||
"""
|
||||
node_show = self.show_node(self.node['uuid'])
|
||||
|
||||
self.assertEqual('False', node_show['maintenance'])
|
||||
|
||||
self.set_node_maintenance(
|
||||
self.node['uuid'],
|
||||
"true --reason 'Testing node-set power state command'")
|
||||
node_show = self.show_node(self.node['uuid'])
|
||||
|
||||
self.assertEqual('True', node_show['maintenance'])
|
||||
self.assertEqual('Testing node-set power state command',
|
||||
node_show['maintenance_reason'])
|
||||
|
||||
def test_node_set_power_state(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) check that power state is None
|
||||
3) set power state to 'off'
|
||||
4) check that power state has been changed successfully
|
||||
"""
|
||||
node_show = self.show_node(self.node['uuid'])
|
||||
|
||||
self.assertEqual('None', node_show['power_state'])
|
||||
|
||||
self.set_node_power_state(self.node['uuid'], "off")
|
||||
node_show = self.show_node(self.node['uuid'])
|
||||
|
||||
self.assertEqual('power off', node_show['power_state'])
|
||||
|
||||
def test_node_set_provision_state(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) check that provision state is 'available'
|
||||
3) set new provision state to the node
|
||||
4) check that provision state has been updated successfully
|
||||
"""
|
||||
node_show = self.show_node(self.node['uuid'])
|
||||
|
||||
self.assertEqual('available', node_show['provision_state'])
|
||||
|
||||
self.set_node_provision_state(self.node['uuid'], 'active')
|
||||
node_show = self.show_node(self.node['uuid'])
|
||||
|
||||
self.assertEqual('active', node_show['provision_state'])
|
||||
|
||||
def test_node_validate(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) validate node
|
||||
"""
|
||||
node_validate = self.validate_node(self.node['uuid'])
|
||||
self.assertNodeValidate(node_validate)
|
||||
|
||||
def test_show_node_states(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node
|
||||
2) check that states returned by node-show and node-show-states
|
||||
are the same
|
||||
"""
|
||||
node_show = self.show_node(self.node['uuid'])
|
||||
show_node_states = self.show_node_states(self.node['uuid'])
|
||||
self.assertNodeStates(node_show, show_node_states)
|
||||
|
||||
def test_node_list(self):
|
||||
"""Test steps:
|
||||
|
||||
1) create node in setup and one more node explicitly
|
||||
2) check that both nodes are in list
|
||||
"""
|
||||
other_node = self.create_node()
|
||||
node_list = self.list_nodes()
|
||||
uuids = [x['UUID'] for x in node_list]
|
||||
names = [x['Name'] for x in node_list]
|
||||
self.assertIn(self.node['uuid'], uuids)
|
||||
self.assertIn(other_node['uuid'], uuids)
|
||||
self.assertIn(self.node['name'], names)
|
||||
self.assertIn(other_node['name'], names)
|
@ -1,89 +0,0 @@
|
||||
# Copyright (c) 2015 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironicclient.tests.functional import base
|
||||
|
||||
|
||||
class NodeSetPowerStateTestIronicClient(base.FunctionalTestBase):
    """Tests for testing node-set-power-state command.

    Tests for the Ironic CLI node-set-power-state command that checks that
    node can be set to 'on', 'off' or 'reboot' power states
    """

    def setUp(self):
        super(NodeSetPowerStateTestIronicClient, self).setUp()
        self.node = self.create_node()
        # A freshly created node has no power state yet.
        self.assertEqual('None', self._power_state())

    def _power_state(self):
        # Helper: current power state as reported by node-show-states.
        return self.show_node_states(self.node['uuid'])['power_state']

    def test_node_set_power_state_on(self):
        """Test steps:

        1) create node
        2) set node power state to 'on'
        3) check node power state has been set to 'on'
        """
        self.set_node_power_state(self.node['uuid'], 'on')
        self.assertEqual('power on', self._power_state())

    def test_node_set_power_state_off(self):
        """Test steps:

        1) create node
        2) set node power state to 'off'
        3) check node power state has been set to 'off'
        """
        self.set_node_power_state(self.node['uuid'], 'off')
        self.assertEqual('power off', self._power_state())

    def test_node_set_power_state_reboot_node_off(self):
        """Test steps:

        1) create node
        2) set node power state to 'off'
        3) check node power state has been set to 'off'
        4) set node power state to 'reboot'
        5) check node power state has been set to 'on'
        """
        self.set_node_power_state(self.node['uuid'], 'off')
        self.assertEqual('power off', self._power_state())

        self.set_node_power_state(self.node['uuid'], 'reboot')
        self.assertEqual('power on', self._power_state())

    def test_node_set_power_state_reboot_node_on(self):
        """Test steps:

        1) create node
        2) set node power state to 'on'
        3) check node power state has been set to 'on'
        4) set node power state to 'reboot'
        5) check node power state has been set to 'on'
        """
        self.set_node_power_state(self.node['uuid'], 'on')
        self.assertEqual('power on', self._power_state())

        self.set_node_power_state(self.node['uuid'], 'reboot')
        self.assertEqual('power on', self._power_state())
|
@ -1,134 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironicclient.tests.functional import base
|
||||
|
||||
|
||||
class PortSanityTestIronicClient(base.FunctionalTestBase):
    """Sanity tests for testing actions with port.

    Smoke test for the Ironic CLI commands which checks basic actions with
    port command like create, show, update, delete etc.
    """

    def setUp(self):
        super(PortSanityTestIronicClient, self).setUp()
        self.node = self.create_node()
        self.port = self.create_port(self.node['uuid'])

    def test_port_create(self):
        """Test steps:

        1) create node in setUp()
        2) create port in setUp()
        3) check that port has been successfully created
        """
        self.assertIn(self.port['uuid'], self.get_uuids_from_port_list())

    def test_port_delete(self):
        """Test steps:

        1) create node in setUp()
        2) create port in setUp()
        3) check that port has been successfully created
        4) delete port
        5) check that port has been successfully deleted
        """
        uuid = self.port['uuid']
        self.assertIn(uuid, self.get_uuids_from_port_list())

        self.delete_port(uuid)

        self.assertNotIn(uuid, self.get_uuids_from_port_list())

    def test_port_show(self):
        """Test steps:

        1) create node in setUp()
        2) create port in setUp()
        3) check that port-show returns the same port UUID as port-create
        """
        shown = self.show_port(self.port['uuid'])
        self.assertEqual(self.port['uuid'], shown['uuid'])

    def test_port_show_field(self):
        """Test steps:

        1) create node in setUp()
        2) create port in setUp()
        3) show port with fields uuid, address, node_uuid
        4) check that only fields uuid, address,
        node_uuid are the output fields
        """
        fields = ['uuid', 'address', 'node_uuid']
        shown = self.show_port(self.port['uuid'],
                               params='--fields {0} {1} {2}'.format(*fields))
        self.assertTableHeaders(fields, shown.keys())

    def test_port_update(self):
        """Test steps:

        1) create node in setUp()
        2) create port in setUp()
        3) create node to replace
        4) update port replacing node
        5) check that port has been successfully updated
        """
        new_node = self.create_node()
        updated = self.update_port(self.port['uuid'], 'replace',
                                   params='node_uuid={0}'
                                   .format(new_node['uuid']))

        self.assertEqual(new_node['uuid'], updated['node_uuid'])
        self.assertNotEqual(self.port['node_uuid'], updated['node_uuid'])

    def test_port_list(self):
        """Test steps:

        1) create node and port in setUp()
        2) create one more node and port explicitly
        3) check that port-list contains UUIDs of created ports
        4) check that port-list contains Addresses of created ports
        """
        extra_node = self.create_node()
        extra_port = self.create_port(extra_node['uuid'])

        listing = self.list_ports()
        uuids = {row['UUID'] for row in listing}
        addresses = {row['Address'] for row in listing}
        self.assertTrue({self.port['uuid'],
                         extra_port['uuid']}.issubset(uuids))
        self.assertTrue({self.port['address'],
                         extra_port['address']}.issubset(addresses))

    def test_port_create_with_portgroup_uuid(self):
        """Test steps:

        1) Create node in setUp().
        2) Create a port group.
        3) Create a port with specified port group UUID.
        4) Check port properties for portgroup_uuid.
        """
        group = self.create_portgroup(self.node['uuid'])
        # Port groups require API microversion 1.25.
        port = self.create_port(
            self.node['uuid'],
            flags='--ironic-api-version 1.25',
            params='--portgroup {0}'.format(group['uuid']))

        self.assertEqual(group['uuid'], port['portgroup_uuid'])
|
@ -1,114 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironicclient.tests.functional import base
|
||||
|
||||
|
||||
class PortGroupSanityTest(base.FunctionalTestBase):
    """Sanity tests for testing actions with port groups.

    Smoke test for the Ironic CLI port group subcommands:
    create, show, update, delete, list, port-list.
    """

    def setUp(self):
        super(PortGroupSanityTest, self).setUp()
        self.node = self.create_node()
        self.port_group = self.create_portgroup(self.node['uuid'])

    def test_portgroup_create(self):
        """Test steps:

        1) Create node and port group in setUp().
        2) Check that port group has been successfully created.
        """
        self.assertIn(self.port_group['uuid'],
                      self.get_portgroup_uuids_from_portgroup_list())

    def test_portgroup_delete(self):
        """Test steps:

        1) Create node and port group in setUp().
        2) Delete port group.
        3) Check that port group has been successfully deleted.
        """
        self.delete_portgroup(self.port_group['uuid'])
        self.assertNotIn(self.port_group['uuid'],
                         self.get_portgroup_uuids_from_portgroup_list())

    def test_portgroup_show(self):
        """Test steps:

        1) Create node and port group in setUp().
        2) Check that portgroup-show returns the same UUID as portgroup-create.
        """
        shown = self.show_portgroup(self.port_group['uuid'])
        self.assertEqual(self.port_group['uuid'], shown['uuid'])
        self.assertEqual(self.port_group['name'], shown['name'])

    def test_portgroup_list(self):
        """Test steps:

        1) Create node and port group in setUp().
        2) Create one more node and port group.
        3) Check that portgroup-list contains UUIDs
        of all created port groups.
        """
        extra_node = self.create_node()
        extra_group = self.create_portgroup(extra_node['uuid'])

        listed = {row['UUID'] for row in self.list_portgroups()}

        self.assertTrue({self.port_group['uuid'],
                         extra_group['uuid']}.issubset(listed))

    def test_portgroup_update(self):
        """Test steps:

        1) Create node and port group in setUp().
        2) Create node to replace.
        3) Set new node to maintenance.
        4) Update port group by replacing node.
        5) Check that port group has been successfully updated.
        """
        new_node = self.create_node()
        self.set_node_maintenance(new_node['uuid'], True)
        updated = self.update_portgroup(
            self.port_group['uuid'], 'replace',
            params='node_uuid={0}'.format(new_node['uuid']))
        self.assertEqual(new_node['uuid'], updated['node_uuid'])
        self.assertNotEqual(self.port_group['node_uuid'],
                            updated['node_uuid'])

    def test_portgroup_port_list(self):
        """Test steps:

        1) Create node and port group in setUp().
        2) Create a port.
        3) Set node to maintenance.
        4) Attach port to the port group.
        5) List the ports associated with a port group.
        6) Check port UUID in list.
        7) Check port address in list.
        """
        port = self.create_port(self.node['uuid'])
        self.set_node_maintenance(self.node['uuid'], True)
        # Attaching a port to a port group requires API microversion 1.25.
        self.update_port(port['uuid'], 'replace',
                         flags='--ironic-api-version 1.25',
                         params='portgroup_uuid={0}'
                         .format(self.port_group['uuid']))
        rows = self.portgroup_port_list(self.port_group['uuid'])
        self.assertIn(port['uuid'], [row['UUID'] for row in rows])
        self.assertIn(port['address'], [row['Address'] for row in rows])
|
@ -1,62 +0,0 @@
|
||||
# Copyright (c) 2015 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironicclient.tests.functional import base
|
||||
|
||||
|
||||
class TableStructureIronicCLITests(base.FunctionalTestBase):
    """Basic, read-only table structure tests for Ironic CLI commands.

    Basic smoke tests for the Ironic CLI commands to check table structure
    which do not require creating or modifying Ironic objects.
    """

    def _check_structure(self, command, expected_headers):
        # Helper: fetch a command's table headers and compare them
        # against the expected column names.
        self.assertTableHeaders(expected_headers,
                                self.get_table_headers(command))

    def test_chassis_list_table_structure(self):
        """Test steps:

        1) get chassis-list
        2) check table structure
        """
        self._check_structure('chassis-list', ['Description', 'UUID'])

    def test_node_list_table_structure(self):
        """Test steps:

        1) get node-list
        2) check table structure
        """
        self._check_structure('node-list',
                              ['UUID', 'Name', 'Instance UUID',
                               'Power State', 'Provisioning State',
                               'Maintenance'])

    def test_port_list_table_structure(self):
        """Test steps:

        1) get port-list
        2) check table structure
        """
        self._check_structure('port-list', ['UUID', 'Address'])

    def test_driver_list_table_structure(self):
        """Test steps:

        1) get driver-list
        2) check table structure
        """
        self._check_structure('driver-list',
                              ['Supported driver(s)', 'Active host(s)'])
|
@ -1,37 +0,0 @@
|
||||
# Copyright (c) 2015 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import six
|
||||
|
||||
|
||||
def get_dict_from_output(output):
    """Parse list of dictionaries, return a dictionary.

    :param output: list of dictionaries, each with 'Property' and
        'Value' keys (one CLI table row per entry)
    :returns: dict mapping each Property to its Value coerced to text
    """
    return {row['Property']: six.text_type(row['Value']) for row in output}
|
||||
|
||||
|
||||
def get_object(object_list, object_value):
    """Get Ironic object by value from list of Ironic objects.

    :param object_list: the output of the cmd
    :param object_value: value to get
    :returns: the first object whose values contain ``object_value``,
        or None when nothing matches
    """
    matches = (candidate for candidate in object_list
               if object_value in candidate.values())
    return next(matches, None)
|
@ -1,181 +0,0 @@
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
from oslotest import base as test_base
|
||||
|
||||
from ironicclient.common.apiclient import base
|
||||
|
||||
|
||||
class HumanResource(base.Resource):
    """Test resource that derives a human-readable id from its name."""

    # Enables base.Resource's human_id support (slugified 'name').
    HUMAN_ID = True
|
||||
|
||||
|
||||
class HumanResourceManager(base.ManagerWithFind):
    """Test manager exercising list/get/update on /human_resources."""

    resource_class = HumanResource

    def list(self):
        return self._list("/human_resources", "human_resources")

    def get(self, human_resource):
        url = "/human_resources/%s" % base.getid(human_resource)
        return self._get(url, "human_resource")

    def update(self, human_resource, name):
        url = "/human_resources/%s" % base.getid(human_resource)
        body = {"human_resource": {"name": name}}
        return self._put(url, body, "human_resource")
|
||||
|
||||
|
||||
class CrudResource(base.Resource):
    """Plain resource type used to exercise base.CrudManager."""
    pass
|
||||
|
||||
|
||||
class CrudResourceManager(base.CrudManager):
    """Manager class for manipulating Identity crud_resources."""
    resource_class = CrudResource
    collection_key = 'crud_resources'
    key = 'crud_resource'

    def get(self, crud_resource):
        # Accept either a resource object or a bare id and translate it
        # into the keyword argument the CrudManager API expects.
        resource_id = base.getid(crud_resource)
        return super(CrudResourceManager, self).get(
            crud_resource_id=resource_id)
|
||||
|
||||
|
||||
class ResourceTest(test_base.BaseTestCase):
    """Unit tests for base.Resource repr, id helpers and equality."""

    def test_resource_repr(self):
        res = base.Resource(None, dict(foo="bar", baz="spam"))
        self.assertEqual("<Resource baz=spam, foo=bar>", repr(res))

    def test_getid(self):
        class TmpObject(base.Resource):
            id = "4"
        self.assertEqual("4", base.getid(TmpObject(None, {})))

    def test_human_id(self):
        # Plain resources never expose a human id.
        self.assertIsNone(base.Resource(None, {"name": "1"}).human_id)
        # HUMAN_ID resources derive it from 'name' when set, else None.
        self.assertEqual("1", HumanResource(None, {"name": "1"}).human_id)
        self.assertIsNone(HumanResource(None, {"name": None}).human_id)

    def test_two_resources_with_same_id_are_not_equal(self):
        # Two resources with same ID: never equal if their info differs.
        left = base.Resource(None, {'id': 1, 'name': 'hi'})
        right = base.Resource(None, {'id': 1, 'name': 'hello'})
        self.assertNotEqual(left, right)

    def test_two_resources_with_same_id_and_info_are_equal(self):
        # Two resources with same ID: equal if their info is equal.
        left = base.Resource(None, {'id': 1, 'name': 'hello'})
        right = base.Resource(None, {'id': 1, 'name': 'hello'})
        self.assertEqual(left, right)

    def test_two_resources_with_diff_type_are_not_equal(self):
        # Two resources of different types: never equal.
        self.assertNotEqual(base.Resource(None, {'id': 1}),
                            HumanResource(None, {'id': 1}))

    def test_two_resources_with_no_id_are_equal(self):
        # Two resources with no ID: equal if their info is equal.
        left = base.Resource(None, {'name': 'joe', 'age': 12})
        right = base.Resource(None, {'name': 'joe', 'age': 12})
        self.assertEqual(left, right)
|
||||
|
||||
|
||||
class BaseManagerTestCase(test_base.BaseTestCase):
    """Unit tests for the base.BaseManager HTTP verb helpers."""

    def setUp(self):
        super(BaseManagerTestCase, self).setUp()

        # One canned response object served for both GET and POST.
        self.response = mock.MagicMock()
        self.http_client = mock.MagicMock()
        self.http_client.get.return_value = self.response
        self.http_client.post.return_value = self.response

        self.manager = base.BaseManager(self.http_client)
        self.manager.resource_class = HumanResource

    def test_list(self):
        self.response.json.return_value = {'human_resources': [{'id': 42}]}
        wanted = [HumanResource(self.manager, {'id': 42}, loaded=True)]
        got = self.manager._list("/human_resources", "human_resources")
        self.assertEqual(wanted, got)

    def test_list_no_response_key(self):
        # Without a response key the whole JSON body is the listing.
        self.response.json.return_value = [{'id': 42}]
        wanted = [HumanResource(self.manager, {'id': 42}, loaded=True)]
        got = self.manager._list("/human_resources")
        self.assertEqual(wanted, got)

    def test_list_get(self):
        self.manager._list("/human_resources", "human_resources")
        self.manager.client.get.assert_called_with("/human_resources")

    def test_list_post(self):
        # Passing a body switches _list from GET to POST.
        self.manager._list("/human_resources", "human_resources",
                           json={'id': 42})
        self.manager.client.post.assert_called_with("/human_resources",
                                                    json={'id': 42})

    def test_get(self):
        self.response.json.return_value = {'human_resources': {'id': 42}}
        wanted = HumanResource(self.manager, {'id': 42}, loaded=True)
        got = self.manager._get("/human_resources/42", "human_resources")
        self.manager.client.get.assert_called_with("/human_resources/42")
        self.assertEqual(wanted, got)

    def test_get_no_response_key(self):
        self.response.json.return_value = {'id': 42}
        wanted = HumanResource(self.manager, {'id': 42}, loaded=True)
        got = self.manager._get("/human_resources/42")
        self.manager.client.get.assert_called_with("/human_resources/42")
        self.assertEqual(wanted, got)

    def test_post(self):
        self.response.json.return_value = {'human_resources': {'id': 42}}
        wanted = HumanResource(self.manager, {'id': 42}, loaded=True)
        got = self.manager._post("/human_resources",
                                 response_key="human_resources",
                                 json={'id': 42})
        self.manager.client.post.assert_called_with("/human_resources",
                                                    json={'id': 42})
        self.assertEqual(wanted, got)

    def test_post_return_raw(self):
        # return_raw=True yields the plain dict, not a Resource.
        self.response.json.return_value = {'human_resources': {'id': 42}}
        got = self.manager._post("/human_resources",
                                 response_key="human_resources",
                                 json={'id': 42}, return_raw=True)
        self.manager.client.post.assert_called_with("/human_resources",
                                                    json={'id': 42})
        self.assertEqual({'id': 42}, got)

    def test_post_no_response_key(self):
        self.response.json.return_value = {'id': 42}
        wanted = HumanResource(self.manager, {'id': 42}, loaded=True)
        got = self.manager._post("/human_resources", json={'id': 42})
        self.manager.client.post.assert_called_with("/human_resources",
                                                    json={'id': 42})
        self.assertEqual(wanted, got)
|
@ -1,139 +0,0 @@
|
||||
# Copyright 2012 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslotest import base as test_base
|
||||
import six
|
||||
from six.moves import http_client
|
||||
|
||||
from ironicclient.common.apiclient import exceptions
|
||||
|
||||
|
||||
class FakeResponse(object):
    """Minimal stand-in for an HTTP response object.

    Any keyword argument becomes an instance attribute, so tests can
    fabricate status_code, headers, text, etc. as needed.
    """

    # Class-level default; overridden per instance via the constructor.
    json_data = {}

    def __init__(self, **kwargs):
        for attr, value in kwargs.items():
            setattr(self, attr, value)

    def json(self):
        return self.json_data
|
||||
|
||||
|
||||
class ExceptionsArgsTest(test_base.BaseTestCase):
    """Tests for exceptions.from_response over various payload formats."""

    def assert_exception(self, ex_cls, method, url, status_code, json_data,
                         error_msg=None, error_details=None,
                         check_description=True):
        # Build a fake JSON response and verify that from_response maps
        # it onto the expected exception class and attributes.
        err = exceptions.from_response(
            FakeResponse(status_code=status_code,
                         headers={"Content-Type": "application/json"},
                         json_data=json_data),
            method,
            url)
        self.assertIsInstance(err, ex_cls)
        if check_description:
            expected_msg = error_msg or json_data["error"]["message"]
            expected_details = error_details or json_data["error"]["details"]
            self.assertEqual(expected_msg, err.message)
            self.assertEqual(expected_details, err.details)
        self.assertEqual(method, err.method)
        self.assertEqual(url, err.url)
        self.assertEqual(status_code, err.http_status)

    def test_from_response_known(self):
        payload = {"error": {"message": "fake message",
                             "details": "fake details"}}
        self.assert_exception(
            exceptions.BadRequest, "GET", "/fake",
            http_client.BAD_REQUEST, payload)

    def test_from_response_unknown(self):
        payload = {"error": {"message": "fake unknown message",
                             "details": "fake unknown details"}}
        # A 4xx code without a dedicated class maps to HTTPClientError.
        self.assert_exception(
            exceptions.HTTPClientError, "POST", "/fake-unknown",
            499, payload)
        # Out-of-range codes fall back to the HttpError base class.
        self.assert_exception(
            exceptions.HttpError, "POST", "/fake-unknown",
            600, payload)

    def test_from_response_non_openstack(self):
        # A body without the OpenStack 'error' envelope still maps the
        # status code; message/details are not checked.
        self.assert_exception(
            exceptions.BadRequest, "POST", "/fake-unknown",
            http_client.BAD_REQUEST, {"alien": 123},
            check_description=False)

    def test_from_response_with_different_response_format(self):
        method = "GET"
        url = "/fake-wsme"
        status_code = http_client.BAD_REQUEST

        # WSME-style payload: message comes from 'faultstring'.
        wsme_payload = {"error_message": {"debuginfo": None,
                                          "faultcode": "Client",
                                          "faultstring": "fake message"}}
        self.assert_exception(
            exceptions.BadRequest, method, url, status_code, wsme_payload,
            six.text_type(wsme_payload["error_message"]["faultstring"]),
            six.text_type(wsme_payload))

        # Nova-style payload: message nested under the error name key.
        nova_payload = {"badRequest": {"message": "fake message",
                                       "code": http_client.BAD_REQUEST}}
        self.assert_exception(
            exceptions.BadRequest, method, url, status_code, nova_payload,
            six.text_type(nova_payload["badRequest"]["message"]),
            six.text_type(nova_payload))

    def test_from_response_with_text_response_format(self):
        text_body = "error_message: fake message"

        err = exceptions.from_response(
            FakeResponse(status_code=http_client.BAD_REQUEST,
                         headers={"Content-Type": "text/html"},
                         text=text_body),
            "GET",
            "/fake-wsme")
        self.assertIsInstance(err, exceptions.BadRequest)
        # For non-JSON responses the raw text becomes the details.
        self.assertEqual(text_body, err.details)
        self.assertEqual("GET", err.method)
        self.assertEqual("/fake-wsme", err.url)
        self.assertEqual(http_client.BAD_REQUEST, err.http_status)

    def test_from_response_with_text_response_format_with_no_body(self):
        err = exceptions.from_response(
            FakeResponse(status_code=http_client.UNAUTHORIZED,
                         headers={"Content-Type": "text/html"}),
            "GET",
            "/fake-wsme")
        self.assertIsInstance(err, exceptions.Unauthorized)
        # No body at all yields empty details.
        self.assertEqual('', err.details)
        self.assertEqual("GET", err.method)
        self.assertEqual("/fake-wsme", err.url)
        self.assertEqual(http_client.UNAUTHORIZED, err.http_status)
|
@ -1,192 +0,0 @@
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
|
||||
import mock
|
||||
import testtools
|
||||
|
||||
from ironicclient.common import base
|
||||
from ironicclient import exc
|
||||
from ironicclient.tests.unit import utils
|
||||
|
||||
|
||||
TESTABLE_RESOURCE = {
|
||||
'uuid': '11111111-2222-3333-4444-555555555555',
|
||||
'attribute1': '1',
|
||||
'attribute2': '2',
|
||||
}
|
||||
|
||||
CREATE_TESTABLE_RESOURCE = copy.deepcopy(TESTABLE_RESOURCE)
|
||||
del CREATE_TESTABLE_RESOURCE['uuid']
|
||||
|
||||
INVALID_ATTRIBUTE_TESTABLE_RESOURCE = {
|
||||
'non-existent-attribute': 'blablabla',
|
||||
'attribute1': '1',
|
||||
'attribute2': '2',
|
||||
}
|
||||
|
||||
UPDATED_TESTABLE_RESOURCE = copy.deepcopy(TESTABLE_RESOURCE)
|
||||
NEW_ATTRIBUTE_VALUE = 'brand-new-attribute-value'
|
||||
UPDATED_TESTABLE_RESOURCE['attribute1'] = NEW_ATTRIBUTE_VALUE
|
||||
|
||||
fake_responses = {
|
||||
'/v1/testableresources':
|
||||
{
|
||||
'GET': (
|
||||
{},
|
||||
{"testableresources": [TESTABLE_RESOURCE]},
|
||||
),
|
||||
'POST': (
|
||||
{},
|
||||
CREATE_TESTABLE_RESOURCE,
|
||||
),
|
||||
},
|
||||
'/v1/testableresources/%s' % TESTABLE_RESOURCE['uuid']:
|
||||
{
|
||||
'GET': (
|
||||
{},
|
||||
TESTABLE_RESOURCE,
|
||||
),
|
||||
'DELETE': (
|
||||
{},
|
||||
None,
|
||||
),
|
||||
'PATCH': (
|
||||
{},
|
||||
UPDATED_TESTABLE_RESOURCE,
|
||||
),
|
||||
},
|
||||
|
||||
}
|
||||
|
||||
|
||||
class TestableResource(base.Resource):
|
||||
def __repr__(self):
|
||||
return "<TestableResource %s>" % self._info
|
||||
|
||||
|
||||
class TestableManager(base.CreateManager):
|
||||
resource_class = TestableResource
|
||||
_creation_attributes = ['attribute1', 'attribute2']
|
||||
_resource_name = 'testableresources'
|
||||
|
||||
def _path(self, id=None):
|
||||
return ('/v1/testableresources/%s' % id if id
|
||||
else '/v1/testableresources')
|
||||
|
||||
def get(self, testable_resource_id, fields=None):
|
||||
return self._get(resource_id=testable_resource_id,
|
||||
fields=fields)
|
||||
|
||||
def delete(self, testable_resource_id):
|
||||
return self._delete(resource_id=testable_resource_id)
|
||||
|
||||
def update(self, testable_resource_id, patch):
|
||||
return self._update(resource_id=testable_resource_id,
|
||||
patch=patch)
|
||||
|
||||
|
||||
class ManagerTestCase(testtools.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(ManagerTestCase, self).setUp()
|
||||
self.api = utils.FakeAPI(fake_responses)
|
||||
self.manager = TestableManager(self.api)
|
||||
|
||||
def test_create(self):
|
||||
resource = self.manager.create(**CREATE_TESTABLE_RESOURCE)
|
||||
expect = [
|
||||
('POST', '/v1/testableresources', {}, CREATE_TESTABLE_RESOURCE),
|
||||
]
|
||||
self.assertEqual(expect, self.api.calls)
|
||||
self.assertTrue(resource)
|
||||
self.assertIsInstance(resource, TestableResource)
|
||||
|
||||
def test_create_with_invalid_attribute(self):
|
||||
self.assertRaisesRegex(exc.InvalidAttribute, "non-existent-attribute",
|
||||
self.manager.create,
|
||||
**INVALID_ATTRIBUTE_TESTABLE_RESOURCE)
|
||||
|
||||
def test__get(self):
|
||||
resource_id = TESTABLE_RESOURCE['uuid']
|
||||
resource = self.manager._get(resource_id)
|
||||
expect = [
|
||||
('GET', '/v1/testableresources/%s' % resource_id,
|
||||
{}, None),
|
||||
]
|
||||
self.assertEqual(expect, self.api.calls)
|
||||
self.assertEqual(resource_id, resource.uuid)
|
||||
self.assertEqual(TESTABLE_RESOURCE['attribute1'], resource.attribute1)
|
||||
|
||||
def test__get_invalid_resource_id_raises(self):
|
||||
resource_ids = [[], {}, False, '', 0, None, ()]
|
||||
for resource_id in resource_ids:
|
||||
self.assertRaises(exc.ValidationError, self.manager._get,
|
||||
resource_id=resource_id)
|
||||
|
||||
def test__get_as_dict(self):
|
||||
resource_id = TESTABLE_RESOURCE['uuid']
|
||||
resource = self.manager._get_as_dict(resource_id)
|
||||
expect = [
|
||||
('GET', '/v1/testableresources/%s' % resource_id,
|
||||
{}, None),
|
||||
]
|
||||
self.assertEqual(expect, self.api.calls)
|
||||
self.assertEqual(TESTABLE_RESOURCE, resource)
|
||||
|
||||
@mock.patch.object(base.Manager, '_get', autospec=True)
|
||||
def test__get_as_dict_empty(self, mock_get):
|
||||
mock_get.return_value = None
|
||||
resource_id = TESTABLE_RESOURCE['uuid']
|
||||
resource = self.manager._get_as_dict(resource_id)
|
||||
mock_get.assert_called_once_with(mock.ANY, resource_id, fields=None)
|
||||
self.assertEqual({}, resource)
|
||||
|
||||
def test_get(self):
|
||||
resource = self.manager.get(TESTABLE_RESOURCE['uuid'])
|
||||
expect = [
|
||||
('GET', '/v1/testableresources/%s' % TESTABLE_RESOURCE['uuid'],
|
||||
{}, None),
|
||||
]
|
||||
self.assertEqual(expect, self.api.calls)
|
||||
self.assertEqual(TESTABLE_RESOURCE['uuid'], resource.uuid)
|
||||
self.assertEqual(TESTABLE_RESOURCE['attribute1'], resource.attribute1)
|
||||
|
||||
def test_update(self):
|
||||
patch = {'op': 'replace',
|
||||
'value': NEW_ATTRIBUTE_VALUE,
|
||||
'path': '/attribute1'}
|
||||
resource = self.manager.update(
|
||||
testable_resource_id=TESTABLE_RESOURCE['uuid'],
|
||||
patch=patch
|
||||
)
|
||||
expect = [
|
||||
('PATCH', '/v1/testableresources/%s' % TESTABLE_RESOURCE['uuid'],
|
||||
{}, patch),
|
||||
]
|
||||
self.assertEqual(expect, self.api.calls)
|
||||
self.assertEqual(NEW_ATTRIBUTE_VALUE, resource.attribute1)
|
||||
|
||||
def test_delete(self):
|
||||
resource = self.manager.delete(
|
||||
testable_resource_id=TESTABLE_RESOURCE['uuid']
|
||||
)
|
||||
expect = [
|
||||
('DELETE', '/v1/testableresources/%s' % TESTABLE_RESOURCE['uuid'],
|
||||
{}, None),
|
||||
]
|
||||
self.assertEqual(expect, self.api.calls)
|
||||
self.assertIsNone(resource)
|
@ -1,740 +0,0 @@
|
||||
# Copyright 2012 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import sys
|
||||
|
||||
import fixtures
|
||||
import mock
|
||||
from oslotest import base as test_base
|
||||
import six
|
||||
|
||||
from ironicclient.common import cliutils
|
||||
|
||||
|
||||
class ValidateArgsTest(test_base.BaseTestCase):
|
||||
|
||||
def test_lambda_no_args(self):
|
||||
cliutils.validate_args(lambda: None)
|
||||
|
||||
def _test_lambda_with_args(self, *args, **kwargs):
|
||||
cliutils.validate_args(lambda x, y: None, *args, **kwargs)
|
||||
|
||||
def test_lambda_positional_args(self):
|
||||
self._test_lambda_with_args(1, 2)
|
||||
|
||||
def test_lambda_kwargs(self):
|
||||
self._test_lambda_with_args(x=1, y=2)
|
||||
|
||||
def test_lambda_mixed_kwargs(self):
|
||||
self._test_lambda_with_args(1, y=2)
|
||||
|
||||
def test_lambda_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_args)
|
||||
|
||||
def test_lambda_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_args, 1)
|
||||
|
||||
def test_lambda_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_args, y=2)
|
||||
|
||||
def _test_lambda_with_default(self, *args, **kwargs):
|
||||
cliutils.validate_args(lambda x, y, z=3: None, *args, **kwargs)
|
||||
|
||||
def test_lambda_positional_args_with_default(self):
|
||||
self._test_lambda_with_default(1, 2)
|
||||
|
||||
def test_lambda_kwargs_with_default(self):
|
||||
self._test_lambda_with_default(x=1, y=2)
|
||||
|
||||
def test_lambda_mixed_kwargs_with_default(self):
|
||||
self._test_lambda_with_default(1, y=2)
|
||||
|
||||
def test_lambda_positional_args_all_with_default(self):
|
||||
self._test_lambda_with_default(1, 2, 3)
|
||||
|
||||
def test_lambda_kwargs_all_with_default(self):
|
||||
self._test_lambda_with_default(x=1, y=2, z=3)
|
||||
|
||||
def test_lambda_mixed_kwargs_all_with_default(self):
|
||||
self._test_lambda_with_default(1, y=2, z=3)
|
||||
|
||||
def test_lambda_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_default)
|
||||
|
||||
def test_lambda_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_default, 1)
|
||||
|
||||
def test_lambda_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_default, y=2)
|
||||
|
||||
def test_lambda_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_default, y=2, z=3)
|
||||
|
||||
def test_function_no_args(self):
|
||||
def func():
|
||||
pass
|
||||
cliutils.validate_args(func)
|
||||
|
||||
def _test_function_with_args(self, *args, **kwargs):
|
||||
def func(x, y):
|
||||
pass
|
||||
cliutils.validate_args(func, *args, **kwargs)
|
||||
|
||||
def test_function_positional_args(self):
|
||||
self._test_function_with_args(1, 2)
|
||||
|
||||
def test_function_kwargs(self):
|
||||
self._test_function_with_args(x=1, y=2)
|
||||
|
||||
def test_function_mixed_kwargs(self):
|
||||
self._test_function_with_args(1, y=2)
|
||||
|
||||
def test_function_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_args)
|
||||
|
||||
def test_function_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_args, 1)
|
||||
|
||||
def test_function_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_args, y=2)
|
||||
|
||||
def _test_function_with_default(self, *args, **kwargs):
|
||||
def func(x, y, z=3):
|
||||
pass
|
||||
cliutils.validate_args(func, *args, **kwargs)
|
||||
|
||||
def test_function_positional_args_with_default(self):
|
||||
self._test_function_with_default(1, 2)
|
||||
|
||||
def test_function_kwargs_with_default(self):
|
||||
self._test_function_with_default(x=1, y=2)
|
||||
|
||||
def test_function_mixed_kwargs_with_default(self):
|
||||
self._test_function_with_default(1, y=2)
|
||||
|
||||
def test_function_positional_args_all_with_default(self):
|
||||
self._test_function_with_default(1, 2, 3)
|
||||
|
||||
def test_function_kwargs_all_with_default(self):
|
||||
self._test_function_with_default(x=1, y=2, z=3)
|
||||
|
||||
def test_function_mixed_kwargs_all_with_default(self):
|
||||
self._test_function_with_default(1, y=2, z=3)
|
||||
|
||||
def test_function_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_default)
|
||||
|
||||
def test_function_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_default, 1)
|
||||
|
||||
def test_function_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_default, y=2)
|
||||
|
||||
def test_function_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_default, y=2, z=3)
|
||||
|
||||
def test_bound_method_no_args(self):
|
||||
class Foo(object):
|
||||
def bar(self):
|
||||
pass
|
||||
cliutils.validate_args(Foo().bar)
|
||||
|
||||
def _test_bound_method_with_args(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
def bar(self, x, y):
|
||||
pass
|
||||
cliutils.validate_args(Foo().bar, *args, **kwargs)
|
||||
|
||||
def test_bound_method_positional_args(self):
|
||||
self._test_bound_method_with_args(1, 2)
|
||||
|
||||
def test_bound_method_kwargs(self):
|
||||
self._test_bound_method_with_args(x=1, y=2)
|
||||
|
||||
def test_bound_method_mixed_kwargs(self):
|
||||
self._test_bound_method_with_args(1, y=2)
|
||||
|
||||
def test_bound_method_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_args)
|
||||
|
||||
def test_bound_method_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_args, 1)
|
||||
|
||||
def test_bound_method_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_args, y=2)
|
||||
|
||||
def _test_bound_method_with_default(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
def bar(self, x, y, z=3):
|
||||
pass
|
||||
cliutils.validate_args(Foo().bar, *args, **kwargs)
|
||||
|
||||
def test_bound_method_positional_args_with_default(self):
|
||||
self._test_bound_method_with_default(1, 2)
|
||||
|
||||
def test_bound_method_kwargs_with_default(self):
|
||||
self._test_bound_method_with_default(x=1, y=2)
|
||||
|
||||
def test_bound_method_mixed_kwargs_with_default(self):
|
||||
self._test_bound_method_with_default(1, y=2)
|
||||
|
||||
def test_bound_method_positional_args_all_with_default(self):
|
||||
self._test_bound_method_with_default(1, 2, 3)
|
||||
|
||||
def test_bound_method_kwargs_all_with_default(self):
|
||||
self._test_bound_method_with_default(x=1, y=2, z=3)
|
||||
|
||||
def test_bound_method_mixed_kwargs_all_with_default(self):
|
||||
self._test_bound_method_with_default(1, y=2, z=3)
|
||||
|
||||
def test_bound_method_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_default)
|
||||
|
||||
def test_bound_method_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_default, 1)
|
||||
|
||||
def test_bound_method_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_default, y=2)
|
||||
|
||||
def test_bound_method_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_default, y=2, z=3)
|
||||
|
||||
def test_unbound_method_no_args(self):
|
||||
class Foo(object):
|
||||
def bar(self):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, Foo())
|
||||
|
||||
def _test_unbound_method_with_args(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
def bar(self, x, y):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, Foo(), *args, **kwargs)
|
||||
|
||||
def test_unbound_method_positional_args(self):
|
||||
self._test_unbound_method_with_args(1, 2)
|
||||
|
||||
def test_unbound_method_kwargs(self):
|
||||
self._test_unbound_method_with_args(x=1, y=2)
|
||||
|
||||
def test_unbound_method_mixed_kwargs(self):
|
||||
self._test_unbound_method_with_args(1, y=2)
|
||||
|
||||
def test_unbound_method_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_args)
|
||||
|
||||
def test_unbound_method_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_args, 1)
|
||||
|
||||
def test_unbound_method_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_args, y=2)
|
||||
|
||||
def _test_unbound_method_with_default(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
def bar(self, x, y, z=3):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, Foo(), *args, **kwargs)
|
||||
|
||||
def test_unbound_method_positional_args_with_default(self):
|
||||
self._test_unbound_method_with_default(1, 2)
|
||||
|
||||
def test_unbound_method_kwargs_with_default(self):
|
||||
self._test_unbound_method_with_default(x=1, y=2)
|
||||
|
||||
def test_unbound_method_mixed_kwargs_with_default(self):
|
||||
self._test_unbound_method_with_default(1, y=2)
|
||||
|
||||
def test_unbound_method_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_default)
|
||||
|
||||
def test_unbound_method_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_default, 1)
|
||||
|
||||
def test_unbound_method_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_default, y=2)
|
||||
|
||||
def test_unbound_method_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_default, y=2, z=3)
|
||||
|
||||
def test_class_method_no_args(self):
|
||||
class Foo(object):
|
||||
@classmethod
|
||||
def bar(cls):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar)
|
||||
|
||||
def _test_class_method_with_args(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
@classmethod
|
||||
def bar(cls, x, y):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, *args, **kwargs)
|
||||
|
||||
def test_class_method_positional_args(self):
|
||||
self._test_class_method_with_args(1, 2)
|
||||
|
||||
def test_class_method_kwargs(self):
|
||||
self._test_class_method_with_args(x=1, y=2)
|
||||
|
||||
def test_class_method_mixed_kwargs(self):
|
||||
self._test_class_method_with_args(1, y=2)
|
||||
|
||||
def test_class_method_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_args)
|
||||
|
||||
def test_class_method_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_args, 1)
|
||||
|
||||
def test_class_method_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_args, y=2)
|
||||
|
||||
def _test_class_method_with_default(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
@classmethod
|
||||
def bar(cls, x, y, z=3):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, *args, **kwargs)
|
||||
|
||||
def test_class_method_positional_args_with_default(self):
|
||||
self._test_class_method_with_default(1, 2)
|
||||
|
||||
def test_class_method_kwargs_with_default(self):
|
||||
self._test_class_method_with_default(x=1, y=2)
|
||||
|
||||
def test_class_method_mixed_kwargs_with_default(self):
|
||||
self._test_class_method_with_default(1, y=2)
|
||||
|
||||
def test_class_method_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_default)
|
||||
|
||||
def test_class_method_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_default, 1)
|
||||
|
||||
def test_class_method_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_default, y=2)
|
||||
|
||||
def test_class_method_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_default, y=2, z=3)
|
||||
|
||||
def test_static_method_no_args(self):
|
||||
class Foo(object):
|
||||
@staticmethod
|
||||
def bar():
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar)
|
||||
|
||||
def _test_static_method_with_args(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
@staticmethod
|
||||
def bar(x, y):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, *args, **kwargs)
|
||||
|
||||
def test_static_method_positional_args(self):
|
||||
self._test_static_method_with_args(1, 2)
|
||||
|
||||
def test_static_method_kwargs(self):
|
||||
self._test_static_method_with_args(x=1, y=2)
|
||||
|
||||
def test_static_method_mixed_kwargs(self):
|
||||
self._test_static_method_with_args(1, y=2)
|
||||
|
||||
def test_static_method_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_args)
|
||||
|
||||
def test_static_method_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_args, 1)
|
||||
|
||||
def test_static_method_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_args, y=2)
|
||||
|
||||
def _test_static_method_with_default(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
@staticmethod
|
||||
def bar(x, y, z=3):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, *args, **kwargs)
|
||||
|
||||
def test_static_method_positional_args_with_default(self):
|
||||
self._test_static_method_with_default(1, 2)
|
||||
|
||||
def test_static_method_kwargs_with_default(self):
|
||||
self._test_static_method_with_default(x=1, y=2)
|
||||
|
||||
def test_static_method_mixed_kwargs_with_default(self):
|
||||
self._test_static_method_with_default(1, y=2)
|
||||
|
||||
def test_static_method_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_default)
|
||||
|
||||
def test_static_method_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_default, 1)
|
||||
|
||||
def test_static_method_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_default, y=2)
|
||||
|
||||
def test_static_method_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_default, y=2, z=3)
|
||||
|
||||
|
||||
class _FakeResult(object):
|
||||
def __init__(self, name, value):
|
||||
self.name = name
|
||||
self.value = value
|
||||
|
||||
|
||||
class PrintResultTestCase(test_base.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(PrintResultTestCase, self).setUp()
|
||||
self.mock_add_row = mock.MagicMock()
|
||||
self.useFixture(fixtures.MonkeyPatch(
|
||||
"prettytable.PrettyTable.add_row",
|
||||
self.mock_add_row))
|
||||
self.mock_get_string = mock.MagicMock(return_value="")
|
||||
self.useFixture(fixtures.MonkeyPatch(
|
||||
"prettytable.PrettyTable.get_string",
|
||||
self.mock_get_string))
|
||||
self.mock_init = mock.MagicMock(return_value=None)
|
||||
self.useFixture(fixtures.MonkeyPatch(
|
||||
"prettytable.PrettyTable.__init__",
|
||||
self.mock_init))
|
||||
# NOTE(dtantsur): won't work with mocked __init__
|
||||
self.useFixture(fixtures.MonkeyPatch(
|
||||
"prettytable.PrettyTable.align",
|
||||
mock.MagicMock()))
|
||||
|
||||
def test_print_list_sort_by_str(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k3", 2),
|
||||
_FakeResult("k2", 3)]
|
||||
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=0)
|
||||
|
||||
self.assertEqual(self.mock_add_row.call_args_list,
|
||||
[mock.call(["k1", 1]),
|
||||
mock.call(["k3", 2]),
|
||||
mock.call(["k2", 3])])
|
||||
self.mock_get_string.assert_called_with(sortby="Name")
|
||||
self.mock_init.assert_called_once_with(["Name", "Value"])
|
||||
|
||||
def test_print_list_sort_by_integer(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k2", 3),
|
||||
_FakeResult("k3", 2)]
|
||||
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=1)
|
||||
|
||||
self.assertEqual(self.mock_add_row.call_args_list,
|
||||
[mock.call(["k1", 1]),
|
||||
mock.call(["k2", 3]),
|
||||
mock.call(["k3", 2])])
|
||||
self.mock_get_string.assert_called_with(sortby="Value")
|
||||
self.mock_init.assert_called_once_with(["Name", "Value"])
|
||||
|
||||
def test_print_list_sort_by_none(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k3", 3),
|
||||
_FakeResult("k2", 2)]
|
||||
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=None)
|
||||
|
||||
self.assertEqual(self.mock_add_row.call_args_list,
|
||||
[mock.call(["k1", 1]),
|
||||
mock.call(["k3", 3]),
|
||||
mock.call(["k2", 2])])
|
||||
self.mock_get_string.assert_called_with()
|
||||
self.mock_init.assert_called_once_with(["Name", "Value"])
|
||||
|
||||
def test_print_list_dict(self):
|
||||
objs = [{'name': 'k1', 'value': 1},
|
||||
{'name': 'k2', 'value': 2}]
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=None)
|
||||
|
||||
self.assertEqual(self.mock_add_row.call_args_list,
|
||||
[mock.call(["k1", 1]),
|
||||
mock.call(["k2", 2])])
|
||||
self.mock_get_string.assert_called_with()
|
||||
self.mock_init.assert_called_once_with(["Name", "Value"])
|
||||
|
||||
def test_print_dict(self):
|
||||
cliutils.print_dict({"K": "k", "Key": "Value"})
|
||||
cliutils.print_dict({"K": "k", "Key": "Long\\nValue"})
|
||||
self.mock_add_row.assert_has_calls([
|
||||
mock.call(["K", "k"]),
|
||||
mock.call(["Key", "Value"]),
|
||||
mock.call(["K", "k"]),
|
||||
mock.call(["Key", "Long"]),
|
||||
mock.call(["", "Value"])],
|
||||
any_order=True)
|
||||
|
||||
def test_print_list_field_labels(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k3", 3),
|
||||
_FakeResult("k2", 2)]
|
||||
field_labels = ["Another Name", "Another Value"]
|
||||
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=None,
|
||||
field_labels=field_labels)
|
||||
|
||||
self.assertEqual(self.mock_add_row.call_args_list,
|
||||
[mock.call(["k1", 1]),
|
||||
mock.call(["k3", 3]),
|
||||
mock.call(["k2", 2])])
|
||||
self.mock_init.assert_called_once_with(field_labels)
|
||||
|
||||
def test_print_list_field_labels_sort(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k3", 3),
|
||||
_FakeResult("k2", 2)]
|
||||
field_labels = ["Another Name", "Another Value"]
|
||||
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=0,
|
||||
field_labels=field_labels)
|
||||
|
||||
self.assertEqual(self.mock_add_row.call_args_list,
|
||||
[mock.call(["k1", 1]),
|
||||
mock.call(["k3", 3]),
|
||||
mock.call(["k2", 2])])
|
||||
self.mock_init.assert_called_once_with(field_labels)
|
||||
self.mock_get_string.assert_called_with(sortby="Another Name")
|
||||
|
||||
def test_print_list_field_labels_too_many(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k3", 3),
|
||||
_FakeResult("k2", 2)]
|
||||
field_labels = ["Another Name", "Another Value", "Redundant"]
|
||||
|
||||
self.assertRaises(ValueError, cliutils.print_list,
|
||||
objs, ["Name", "Value"], sortby_index=None,
|
||||
field_labels=field_labels)
|
||||
|
||||
|
||||
class PrintResultStringTestCase(test_base.BaseTestCase):
|
||||
|
||||
def test_print_list_string(self):
|
||||
objs = [_FakeResult("k1", 1)]
|
||||
field_labels = ["Another Name", "Another Value"]
|
||||
|
||||
orig = sys.stdout
|
||||
sys.stdout = six.StringIO()
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=0,
|
||||
field_labels=field_labels)
|
||||
out = sys.stdout.getvalue()
|
||||
sys.stdout.close()
|
||||
sys.stdout = orig
|
||||
expected = '''\
|
||||
+--------------+---------------+
|
||||
| Another Name | Another Value |
|
||||
+--------------+---------------+
|
||||
| k1 | 1 |
|
||||
+--------------+---------------+
|
||||
'''
|
||||
self.assertEqual(expected, out)
|
||||
|
||||
def test_print_list_string_json(self):
|
||||
objs = [_FakeResult("k1", 1)]
|
||||
field_labels = ["Another Name", "Another Value"]
|
||||
|
||||
orig = sys.stdout
|
||||
sys.stdout = six.StringIO()
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=0,
|
||||
field_labels=field_labels, json_flag=True)
|
||||
out = sys.stdout.getvalue()
|
||||
sys.stdout.close()
|
||||
sys.stdout = orig
|
||||
|
||||
expected = [{"name": "k1", "value": 1}]
|
||||
self.assertEqual(expected, json.loads(out))
|
||||
|
||||
def test_print_dict_string(self):
|
||||
orig = sys.stdout
|
||||
sys.stdout = six.StringIO()
|
||||
cliutils.print_dict({"K": "k", "Key": "Value"})
|
||||
out = sys.stdout.getvalue()
|
||||
sys.stdout.close()
|
||||
sys.stdout = orig
|
||||
expected = '''\
|
||||
+----------+-------+
|
||||
| Property | Value |
|
||||
+----------+-------+
|
||||
| K | k |
|
||||
| Key | Value |
|
||||
+----------+-------+
|
||||
'''
|
||||
self.assertEqual(expected, out)
|
||||
|
||||
def test_print_dict_string_json(self):
|
||||
orig = sys.stdout
|
||||
sys.stdout = six.StringIO()
|
||||
cliutils.print_dict({"K": "k", "Key": "Value"}, json_flag=True)
|
||||
out = sys.stdout.getvalue()
|
||||
sys.stdout.close()
|
||||
sys.stdout = orig
|
||||
expected = {"K": "k", "Key": "Value"}
|
||||
self.assertEqual(expected, json.loads(out))
|
||||
|
||||
def test_print_dict_string_custom_headers(self):
|
||||
orig = sys.stdout
|
||||
sys.stdout = six.StringIO()
|
||||
cliutils.print_dict({"K": "k", "Key": "Value"}, dict_property='Foo',
|
||||
dict_value='Bar')
|
||||
out = sys.stdout.getvalue()
|
||||
sys.stdout.close()
|
||||
sys.stdout = orig
|
||||
expected = '''\
|
||||
+-----+-------+
|
||||
| Foo | Bar |
|
||||
+-----+-------+
|
||||
| K | k |
|
||||
| Key | Value |
|
||||
+-----+-------+
|
||||
'''
|
||||
self.assertEqual(expected, out)
|
||||
|
||||
def test_print_dict_string_sorted(self):
|
||||
orig = sys.stdout
|
||||
sys.stdout = six.StringIO()
|
||||
cliutils.print_dict({"Foo": "k", "Bar": "Value"})
|
||||
out = sys.stdout.getvalue()
|
||||
sys.stdout.close()
|
||||
sys.stdout = orig
|
||||
expected = '''\
|
||||
+----------+-------+
|
||||
| Property | Value |
|
||||
+----------+-------+
|
||||
| Bar | Value |
|
||||
| Foo | k |
|
||||
+----------+-------+
|
||||
'''
|
||||
self.assertEqual(expected, out)
|
||||
|
||||
def test_print_dict_negative_wrap(self):
|
||||
dct = {"K": "k", "Key": "Value"}
|
||||
self.assertRaises(ValueError, cliutils.print_dict, dct, wrap=-10)
|
||||
|
||||
|
||||
class DecoratorsTestCase(test_base.BaseTestCase):
|
||||
|
||||
def test_arg(self):
|
||||
func_args = [("--image", ), ("--flavor", )]
|
||||
func_kwargs = [dict(default=None,
|
||||
metavar="<image>"),
|
||||
dict(default=None,
|
||||
metavar="<flavor>")]
|
||||
|
||||
@cliutils.arg(*func_args[1], **func_kwargs[1])
|
||||
@cliutils.arg(*func_args[0], **func_kwargs[0])
|
||||
def dummy_func():
|
||||
pass
|
||||
|
||||
self.assertTrue(hasattr(dummy_func, "arguments"))
|
||||
self.assertEqual(len(dummy_func.arguments), 2)
|
||||
for args_kwargs in zip(func_args, func_kwargs):
|
||||
self.assertIn(args_kwargs, dummy_func.arguments)
|
||||
|
||||
def test_unauthenticated(self):
|
||||
def dummy_func():
|
||||
pass
|
||||
|
||||
self.assertFalse(cliutils.isunauthenticated(dummy_func))
|
||||
dummy_func = cliutils.unauthenticated(dummy_func)
|
||||
self.assertTrue(cliutils.isunauthenticated(dummy_func))
|
||||
|
||||
|
||||
class EnvTestCase(test_base.BaseTestCase):
|
||||
|
||||
def test_env(self):
|
||||
env = {"alpha": "a", "beta": "b"}
|
||||
self.useFixture(fixtures.MonkeyPatch("os.environ", env))
|
||||
self.assertEqual(env["beta"], cliutils.env("beta"))
|
||||
self.assertEqual(env["beta"], cliutils.env("beta", "alpha"))
|
||||
self.assertEqual(env["alpha"], cliutils.env("alpha", "beta"))
|
||||
self.assertEqual(env["beta"], cliutils.env("gamma", "beta"))
|
||||
self.assertEqual("", cliutils.env("gamma"))
|
||||
self.assertEqual("c", cliutils.env("gamma", default="c"))
|
||||
|
||||
|
||||
class GetPasswordTestCase(test_base.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(GetPasswordTestCase, self).setUp()
|
||||
|
||||
class FakeFile(object):
|
||||
def isatty(self):
|
||||
return True
|
||||
|
||||
self.useFixture(fixtures.MonkeyPatch("sys.stdin", FakeFile()))
|
||||
|
||||
def test_get_password(self):
|
||||
self.useFixture(fixtures.MonkeyPatch("getpass.getpass",
|
||||
lambda prompt: "mellon"))
|
||||
self.assertEqual("mellon", cliutils.get_password())
|
||||
|
||||
def test_get_password_verify(self):
|
||||
env = {"OS_VERIFY_PASSWORD": "True"}
|
||||
self.useFixture(fixtures.MonkeyPatch("os.environ", env))
|
||||
self.useFixture(fixtures.MonkeyPatch("getpass.getpass",
|
||||
lambda prompt: "mellon"))
|
||||
self.assertEqual("mellon", cliutils.get_password())
|
||||
|
||||
def test_get_password_verify_failure(self):
|
||||
env = {"OS_VERIFY_PASSWORD": "True"}
|
||||
self.useFixture(fixtures.MonkeyPatch("os.environ", env))
|
||||
self.useFixture(fixtures.MonkeyPatch("getpass.getpass",
|
||||
lambda prompt: prompt))
|
||||
self.assertIsNone(cliutils.get_password())
|
@ -1,178 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
|
||||
import dogpile.cache
|
||||
import mock
|
||||
|
||||
from ironicclient.common import filecache
|
||||
from ironicclient.tests.unit import utils
|
||||
|
||||
|
||||
class FileCacheTest(utils.BaseTestCase):
|
||||
|
||||
def test__build_key_ok(self):
|
||||
result = filecache._build_key('localhost', '5000')
|
||||
self.assertEqual('localhost:5000', result)
|
||||
|
||||
def test__build_key_none(self):
|
||||
result = filecache._build_key(None, None)
|
||||
self.assertEqual('None:None', result)
|
||||
|
||||
@mock.patch.object(os.environ, 'get', autospec=True)
|
||||
@mock.patch.object(os.path, 'exists', autospec=True)
|
||||
@mock.patch.object(os, 'makedirs', autospec=True)
|
||||
@mock.patch.object(dogpile.cache, 'make_region', autospec=True)
|
||||
def test__get_cache_mkdir(self, mock_makeregion, mock_makedirs,
|
||||
mock_exists, mock_get):
|
||||
cache_val = 6
|
||||
# If not present in the env, get will return the defaulted value
|
||||
mock_get.return_value = filecache.DEFAULT_EXPIRY
|
||||
filecache.CACHE = None
|
||||
mock_exists.return_value = False
|
||||
cache_region = mock.Mock(spec=dogpile.cache.region.CacheRegion)
|
||||
cache_region.configure.return_value = cache_val
|
||||
mock_makeregion.return_value = cache_region
|
||||
self.assertEqual(cache_val, filecache._get_cache())
|
||||
mock_exists.assert_called_once_with(filecache.CACHE_DIR)
|
||||
mock_makedirs.assert_called_once_with(filecache.CACHE_DIR)
|
||||
mock_get.assert_called_once_with(filecache.CACHE_EXPIRY_ENV_VAR,
|
||||
mock.ANY)
|
||||
cache_region.configure.assert_called_once_with(
|
||||
mock.ANY,
|
||||
arguments=mock.ANY,
|
||||
expiration_time=filecache.DEFAULT_EXPIRY)
|
||||
|
||||
@mock.patch.object(os.environ, 'get', autospec=True)
|
||||
@mock.patch.object(os.path, 'exists', autospec=True)
|
||||
@mock.patch.object(os, 'makedirs', autospec=True)
|
||||
@mock.patch.object(dogpile.cache, 'make_region', autospec=True)
|
||||
def test__get_cache_expiry_set(self, mock_makeregion, mock_makedirs,
|
||||
mock_exists, mock_get):
|
||||
cache_val = 5643
|
||||
cache_expiry = '78'
|
||||
mock_get.return_value = cache_expiry
|
||||
filecache.CACHE = None
|
||||
mock_exists.return_value = False
|
||||
cache_region = mock.Mock(spec=dogpile.cache.region.CacheRegion)
|
||||
cache_region.configure.return_value = cache_val
|
||||
mock_makeregion.return_value = cache_region
|
||||
self.assertEqual(cache_val, filecache._get_cache())
|
||||
mock_get.assert_called_once_with(filecache.CACHE_EXPIRY_ENV_VAR,
|
||||
mock.ANY)
|
||||
cache_region.configure.assert_called_once_with(
|
||||
mock.ANY,
|
||||
arguments=mock.ANY,
|
||||
expiration_time=int(cache_expiry))
|
||||
|
||||
@mock.patch.object(filecache.LOG, 'warning', autospec=True)
|
||||
@mock.patch.object(os.environ, 'get', autospec=True)
|
||||
@mock.patch.object(os.path, 'exists', autospec=True)
|
||||
@mock.patch.object(os, 'makedirs', autospec=True)
|
||||
@mock.patch.object(dogpile.cache, 'make_region', autospec=True)
|
||||
def test__get_cache_expiry_set_invalid(self, mock_makeregion,
|
||||
mock_makedirs, mock_exists,
|
||||
mock_get, mock_log):
|
||||
cache_val = 5643
|
||||
cache_expiry = 'Rollenhagen'
|
||||
mock_get.return_value = cache_expiry
|
||||
filecache.CACHE = None
|
||||
mock_exists.return_value = False
|
||||
cache_region = mock.Mock(spec=dogpile.cache.region.CacheRegion)
|
||||
cache_region.configure.return_value = cache_val
|
||||
mock_makeregion.return_value = cache_region
|
||||
self.assertEqual(cache_val, filecache._get_cache())
|
||||
mock_get.assert_called_once_with(filecache.CACHE_EXPIRY_ENV_VAR,
|
||||
mock.ANY)
|
||||
cache_region.configure.assert_called_once_with(
|
||||
mock.ANY,
|
||||
arguments=mock.ANY,
|
||||
expiration_time=filecache.DEFAULT_EXPIRY)
|
||||
log_dict = {'curr_val': cache_expiry,
|
||||
'default': filecache.DEFAULT_EXPIRY,
|
||||
'env_var': filecache.CACHE_EXPIRY_ENV_VAR}
|
||||
mock_log.assert_called_once_with(mock.ANY, log_dict)
|
||||
|
||||
@mock.patch.object(os.path, 'exists', autospec=True)
|
||||
@mock.patch.object(os, 'makedirs', autospec=True)
|
||||
def test__get_cache_dir_already_exists(self, mock_makedirs, mock_exists):
|
||||
cache_val = 5552368
|
||||
mock_exists.return_value = True
|
||||
filecache.CACHE = cache_val
|
||||
self.assertEqual(cache_val, filecache._get_cache())
|
||||
self.assertEqual(0, mock_exists.call_count)
|
||||
self.assertEqual(0, mock_makedirs.call_count)
|
||||
|
||||
@mock.patch.object(dogpile.cache.region, 'CacheRegion', autospec=True)
|
||||
@mock.patch.object(filecache, '_get_cache', autospec=True)
|
||||
def test_save_data_ok(self, mock_get_cache, mock_cache):
|
||||
mock_get_cache.return_value = mock_cache
|
||||
host = 'fred'
|
||||
port = '1234'
|
||||
hostport = '%s:%s' % (host, port)
|
||||
data = 'some random data'
|
||||
filecache.save_data(host, port, data)
|
||||
mock_cache.set.assert_called_once_with(hostport, data)
|
||||
|
||||
@mock.patch.object(os.path, 'isfile', autospec=True)
|
||||
@mock.patch.object(dogpile.cache.region, 'CacheRegion', autospec=True)
|
||||
@mock.patch.object(filecache, '_get_cache', autospec=True)
|
||||
def test_retrieve_data_ok(self, mock_get_cache, mock_cache, mock_isfile):
|
||||
s = 'spam'
|
||||
mock_isfile.return_value = True
|
||||
mock_cache.get.return_value = s
|
||||
mock_get_cache.return_value = mock_cache
|
||||
host = 'fred'
|
||||
port = '1234'
|
||||
hostport = '%s:%s' % (host, port)
|
||||
result = filecache.retrieve_data(host, port)
|
||||
mock_cache.get.assert_called_once_with(hostport, expiration_time=None)
|
||||
self.assertEqual(s, result)
|
||||
|
||||
@mock.patch.object(os.path, 'isfile', autospec=True)
|
||||
@mock.patch.object(dogpile.cache.region, 'CacheRegion', autospec=True)
|
||||
@mock.patch.object(filecache, '_get_cache', autospec=True)
|
||||
def test_retrieve_data_ok_with_expiry(self, mock_get_cache, mock_cache,
|
||||
mock_isfile):
|
||||
s = 'spam'
|
||||
mock_isfile.return_value = True
|
||||
mock_cache.get.return_value = s
|
||||
mock_get_cache.return_value = mock_cache
|
||||
host = 'fred'
|
||||
port = '1234'
|
||||
expiry = '987'
|
||||
hostport = '%s:%s' % (host, port)
|
||||
result = filecache.retrieve_data(host, port, expiry)
|
||||
mock_cache.get.assert_called_once_with(hostport,
|
||||
expiration_time=expiry)
|
||||
self.assertEqual(s, result)
|
||||
|
||||
@mock.patch.object(os.path, 'isfile', autospec=True)
|
||||
@mock.patch.object(dogpile.cache.region, 'CacheRegion', autospec=True)
|
||||
@mock.patch.object(filecache, '_get_cache', autospec=True)
|
||||
def test_retrieve_data_not_found(self, mock_get_cache, mock_cache,
|
||||
mock_isfile):
|
||||
mock_isfile.return_value = True
|
||||
mock_cache.get.return_value = dogpile.cache.api.NO_VALUE
|
||||
mock_get_cache.return_value = mock_cache
|
||||
host = 'fred'
|
||||
port = '1234'
|
||||
hostport = '%s:%s' % (host, port)
|
||||
result = filecache.retrieve_data(host, port)
|
||||
mock_cache.get.assert_called_once_with(hostport, expiration_time=None)
|
||||
self.assertIsNone(result)
|
||||
|
||||
@mock.patch.object(os.path, 'isfile', autospec=True)
|
||||
def test_retrieve_data_no_cache_file(self, mock_isfile):
|
||||
mock_isfile.return_value = False
|
||||
self.assertIsNone(filecache.retrieve_data(host='spam', port='eggs'))
|
@ -1,788 +0,0 @@
|
||||
# Copyright 2012 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import time
|
||||
|
||||
import mock
|
||||
from oslo_serialization import jsonutils
|
||||
import requests
|
||||
import six
|
||||
from six.moves import http_client
|
||||
|
||||
from keystoneauth1 import exceptions as kexc
|
||||
|
||||
from ironicclient.common import filecache
|
||||
from ironicclient.common import http
|
||||
from ironicclient import exc
|
||||
from ironicclient.tests.unit import utils
|
||||
|
||||
|
||||
DEFAULT_TIMEOUT = 600
|
||||
|
||||
DEFAULT_HOST = 'localhost'
|
||||
DEFAULT_PORT = '1234'
|
||||
|
||||
|
||||
def _get_error_body(faultstring=None, debuginfo=None, description=None):
|
||||
if description:
|
||||
error_body = {'description': description}
|
||||
else:
|
||||
error_body = {
|
||||
'faultstring': faultstring,
|
||||
'debuginfo': debuginfo
|
||||
}
|
||||
raw_error_body = jsonutils.dump_as_bytes(error_body)
|
||||
body = {'error_message': raw_error_body}
|
||||
return jsonutils.dumps(body)
|
||||
|
||||
|
||||
def _session_client(**kwargs):
|
||||
return http.SessionClient(os_ironic_api_version='1.6',
|
||||
api_version_select_state='default',
|
||||
max_retries=5,
|
||||
retry_interval=2,
|
||||
auth=None,
|
||||
interface='publicURL',
|
||||
service_type='baremetal',
|
||||
region_name='',
|
||||
endpoint='http://%s:%s' % (DEFAULT_HOST,
|
||||
DEFAULT_PORT),
|
||||
**kwargs)
|
||||
|
||||
|
||||
class VersionNegotiationMixinTest(utils.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(VersionNegotiationMixinTest, self).setUp()
|
||||
self.test_object = http.VersionNegotiationMixin()
|
||||
self.test_object.os_ironic_api_version = '1.6'
|
||||
self.test_object.api_version_select_state = 'default'
|
||||
self.test_object.endpoint = "http://localhost:1234"
|
||||
self.mock_mcu = mock.MagicMock()
|
||||
self.test_object._make_connection_url = self.mock_mcu
|
||||
self.response = utils.FakeResponse(
|
||||
{}, status=http_client.NOT_ACCEPTABLE)
|
||||
self.test_object.get_server = mock.MagicMock(
|
||||
return_value=('localhost', '1234'))
|
||||
|
||||
def test__generic_parse_version_headers_has_headers(self):
|
||||
response = {'X-OpenStack-Ironic-API-Minimum-Version': '1.1',
|
||||
'X-OpenStack-Ironic-API-Maximum-Version': '1.6',
|
||||
}
|
||||
expected = ('1.1', '1.6')
|
||||
result = self.test_object._generic_parse_version_headers(response.get)
|
||||
self.assertEqual(expected, result)
|
||||
|
||||
def test__generic_parse_version_headers_missing_headers(self):
|
||||
response = {}
|
||||
expected = (None, None)
|
||||
result = self.test_object._generic_parse_version_headers(response.get)
|
||||
self.assertEqual(expected, result)
|
||||
|
||||
@mock.patch.object(filecache, 'save_data', autospec=True)
|
||||
def test_negotiate_version_bad_state(self, mock_save_data):
|
||||
# Test if bad api_version_select_state value
|
||||
self.test_object.api_version_select_state = 'word of the day: augur'
|
||||
self.assertRaises(
|
||||
RuntimeError,
|
||||
self.test_object.negotiate_version,
|
||||
None, None)
|
||||
self.assertEqual(0, mock_save_data.call_count)
|
||||
|
||||
@mock.patch.object(filecache, 'save_data', autospec=True)
|
||||
@mock.patch.object(http.VersionNegotiationMixin, '_parse_version_headers',
|
||||
autospec=True)
|
||||
def test_negotiate_version_server_older(self, mock_pvh, mock_save_data):
|
||||
# Test newer client and older server
|
||||
latest_ver = '1.5'
|
||||
mock_pvh.return_value = ('1.1', latest_ver)
|
||||
mock_conn = mock.MagicMock()
|
||||
result = self.test_object.negotiate_version(mock_conn, self.response)
|
||||
self.assertEqual(latest_ver, result)
|
||||
self.assertEqual(1, mock_pvh.call_count)
|
||||
host, port = http.get_server(self.test_object.endpoint)
|
||||
mock_save_data.assert_called_once_with(host=host, port=port,
|
||||
data=latest_ver)
|
||||
|
||||
@mock.patch.object(filecache, 'save_data', autospec=True)
|
||||
@mock.patch.object(http.VersionNegotiationMixin, '_parse_version_headers',
|
||||
autospec=True)
|
||||
def test_negotiate_version_server_newer(self, mock_pvh, mock_save_data):
|
||||
# Test newer server and older client
|
||||
mock_pvh.return_value = ('1.1', '1.10')
|
||||
mock_conn = mock.MagicMock()
|
||||
result = self.test_object.negotiate_version(mock_conn, self.response)
|
||||
self.assertEqual('1.6', result)
|
||||
self.assertEqual(1, mock_pvh.call_count)
|
||||
mock_save_data.assert_called_once_with(host=DEFAULT_HOST,
|
||||
port=DEFAULT_PORT,
|
||||
data='1.6')
|
||||
|
||||
@mock.patch.object(filecache, 'save_data', autospec=True)
|
||||
@mock.patch.object(http.VersionNegotiationMixin, '_make_simple_request',
|
||||
autospec=True)
|
||||
@mock.patch.object(http.VersionNegotiationMixin, '_parse_version_headers',
|
||||
autospec=True)
|
||||
def test_negotiate_version_server_no_version_on_error(
|
||||
self, mock_pvh, mock_msr, mock_save_data):
|
||||
# Test older Ironic version which errored with no version number and
|
||||
# have to retry with simple get
|
||||
mock_pvh.side_effect = iter([(None, None), ('1.1', '1.2')])
|
||||
mock_conn = mock.MagicMock()
|
||||
result = self.test_object.negotiate_version(mock_conn, self.response)
|
||||
self.assertEqual('1.2', result)
|
||||
self.assertTrue(mock_msr.called)
|
||||
self.assertEqual(2, mock_pvh.call_count)
|
||||
self.assertEqual(1, mock_save_data.call_count)
|
||||
|
||||
@mock.patch.object(filecache, 'save_data', autospec=True)
|
||||
@mock.patch.object(http.VersionNegotiationMixin, '_parse_version_headers',
|
||||
autospec=True)
|
||||
def test_negotiate_version_server_explicit_too_high(self, mock_pvh,
|
||||
mock_save_data):
|
||||
# requested version is not supported because it is too large
|
||||
mock_pvh.return_value = ('1.1', '1.6')
|
||||
mock_conn = mock.MagicMock()
|
||||
self.test_object.api_version_select_state = 'user'
|
||||
self.test_object.os_ironic_api_version = '99.99'
|
||||
self.assertRaises(
|
||||
exc.UnsupportedVersion,
|
||||
self.test_object.negotiate_version,
|
||||
mock_conn, self.response)
|
||||
self.assertEqual(1, mock_pvh.call_count)
|
||||
self.assertEqual(0, mock_save_data.call_count)
|
||||
|
||||
@mock.patch.object(filecache, 'save_data', autospec=True)
|
||||
@mock.patch.object(http.VersionNegotiationMixin, '_parse_version_headers',
|
||||
autospec=True)
|
||||
def test_negotiate_version_server_explicit_not_supported(self, mock_pvh,
|
||||
mock_save_data):
|
||||
# requested version is supported by the server but the server returned
|
||||
# 406 because the requested operation is not supported with the
|
||||
# requested version
|
||||
mock_pvh.return_value = ('1.1', '1.6')
|
||||
mock_conn = mock.MagicMock()
|
||||
self.test_object.api_version_select_state = 'negotiated'
|
||||
self.test_object.os_ironic_api_version = '1.5'
|
||||
self.assertRaises(
|
||||
exc.UnsupportedVersion,
|
||||
self.test_object.negotiate_version,
|
||||
mock_conn, self.response)
|
||||
self.assertEqual(1, mock_pvh.call_count)
|
||||
self.assertEqual(0, mock_save_data.call_count)
|
||||
|
||||
def test_get_server(self):
|
||||
host = 'ironic-host'
|
||||
port = '6385'
|
||||
endpoint = 'http://%s:%s/ironic/v1/' % (host, port)
|
||||
self.assertEqual((host, port), http.get_server(endpoint))
|
||||
|
||||
|
||||
class HttpClientTest(utils.BaseTestCase):
|
||||
|
||||
def test_url_generation_trailing_slash_in_base(self):
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
url = client._make_connection_url('/v1/resources')
|
||||
self.assertEqual('http://localhost/v1/resources', url)
|
||||
|
||||
def test_url_generation_without_trailing_slash_in_base(self):
|
||||
client = http.HTTPClient('http://localhost')
|
||||
url = client._make_connection_url('/v1/resources')
|
||||
self.assertEqual('http://localhost/v1/resources', url)
|
||||
|
||||
def test_url_generation_without_prefix_slash_in_path(self):
|
||||
client = http.HTTPClient('http://localhost')
|
||||
url = client._make_connection_url('v1/resources')
|
||||
self.assertEqual('http://localhost/v1/resources', url)
|
||||
|
||||
def test_server_https_request_with_application_octet_stream(self):
|
||||
client = http.HTTPClient('https://localhost/')
|
||||
client.session = utils.mockSession(
|
||||
{'Content-Type': 'application/octet-stream'},
|
||||
"Body",
|
||||
version=1,
|
||||
status_code=http_client.OK)
|
||||
|
||||
response, body = client.json_request('GET', '/v1/resources')
|
||||
self.assertEqual(client.session.request.return_value, response)
|
||||
self.assertIsNone(body)
|
||||
|
||||
def test_server_exception_empty_body(self):
|
||||
error_body = _get_error_body()
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
client.session = utils.mockSession(
|
||||
{'Content-Type': 'application/json'},
|
||||
error_body,
|
||||
version=1,
|
||||
status_code=http_client.INTERNAL_SERVER_ERROR)
|
||||
|
||||
self.assertRaises(exc.InternalServerError,
|
||||
client.json_request,
|
||||
'GET', '/v1/resources')
|
||||
|
||||
def test_server_exception_msg_only(self):
|
||||
error_msg = 'test error msg'
|
||||
error_body = _get_error_body(error_msg)
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
client.session = utils.mockSession(
|
||||
{'Content-Type': 'application/json'},
|
||||
error_body,
|
||||
version=1,
|
||||
status_code=http_client.INTERNAL_SERVER_ERROR)
|
||||
|
||||
self.assertRaises(exc.InternalServerError,
|
||||
client.json_request,
|
||||
'GET', '/v1/resources')
|
||||
|
||||
def test_server_exception_description_only(self):
|
||||
error_msg = 'test error msg'
|
||||
error_body = _get_error_body(description=error_msg)
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
client.session = utils.mockSession(
|
||||
{'Content-Type': 'application/json'},
|
||||
error_body,
|
||||
version=1,
|
||||
status_code=http_client.BAD_REQUEST)
|
||||
|
||||
self.assertRaisesRegex(exc.BadRequest, 'test error msg',
|
||||
client.json_request,
|
||||
'GET', '/v1/resources')
|
||||
|
||||
def test_server_https_request_ok(self):
|
||||
client = http.HTTPClient('https://localhost/')
|
||||
client.session = utils.mockSession(
|
||||
{'Content-Type': 'application/json'},
|
||||
"Body",
|
||||
version=1,
|
||||
status_code=http_client.OK)
|
||||
|
||||
client.json_request('GET', '/v1/resources')
|
||||
|
||||
def test_server_https_empty_body(self):
|
||||
error_body = _get_error_body()
|
||||
|
||||
client = http.HTTPClient('https://localhost/')
|
||||
client.session = utils.mockSession(
|
||||
{'Content-Type': 'application/json'},
|
||||
error_body,
|
||||
version=1,
|
||||
status_code=http_client.INTERNAL_SERVER_ERROR)
|
||||
|
||||
self.assertRaises(exc.InternalServerError,
|
||||
client.json_request,
|
||||
'GET', '/v1/resources')
|
||||
|
||||
def test_401_unauthorized_exception(self):
|
||||
error_body = _get_error_body()
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
client.session = utils.mockSession(
|
||||
{'Content-Type': 'text/plain'},
|
||||
error_body,
|
||||
version=1,
|
||||
status_code=http_client.UNAUTHORIZED)
|
||||
|
||||
self.assertRaises(exc.Unauthorized, client.json_request,
|
||||
'GET', '/v1/resources')
|
||||
|
||||
def test_http_request_not_valid_request(self):
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
client.session.request = mock.Mock(
|
||||
side_effect=http.requests.exceptions.InvalidSchema)
|
||||
|
||||
self.assertRaises(exc.ValidationError, client._http_request,
|
||||
'http://localhost/', 'GET')
|
||||
|
||||
def test__parse_version_headers(self):
|
||||
# Test parsing of version headers from HTTPClient
|
||||
error_body = _get_error_body()
|
||||
expected_result = ('1.1', '1.6')
|
||||
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
fake_resp = utils.mockSessionResponse(
|
||||
{'X-OpenStack-Ironic-API-Minimum-Version': '1.1',
|
||||
'X-OpenStack-Ironic-API-Maximum-Version': '1.6',
|
||||
'Content-Type': 'text/plain',
|
||||
},
|
||||
error_body,
|
||||
version=1,
|
||||
status_code=http_client.NOT_ACCEPTABLE)
|
||||
result = client._parse_version_headers(fake_resp)
|
||||
self.assertEqual(expected_result, result)
|
||||
|
||||
@mock.patch.object(filecache, 'save_data', autospec=True)
|
||||
def test__http_request_client_fallback_fail(self, mock_save_data):
|
||||
# Test when fallback to a supported version fails
|
||||
host, port, latest_ver = 'localhost', '1234', '1.6'
|
||||
error_body = _get_error_body()
|
||||
|
||||
client = http.HTTPClient('http://%s:%s/' % (host, port))
|
||||
client.session = utils.mockSession(
|
||||
{'X-OpenStack-Ironic-API-Minimum-Version': '1.1',
|
||||
'X-OpenStack-Ironic-API-Maximum-Version': latest_ver,
|
||||
'content-type': 'text/plain',
|
||||
},
|
||||
error_body,
|
||||
version=1,
|
||||
status_code=http_client.NOT_ACCEPTABLE)
|
||||
self.assertRaises(
|
||||
exc.UnsupportedVersion,
|
||||
client._http_request,
|
||||
'/v1/resources',
|
||||
'GET')
|
||||
mock_save_data.assert_called_once_with(host=host, data=latest_ver,
|
||||
port=port)
|
||||
|
||||
@mock.patch.object(http.VersionNegotiationMixin, 'negotiate_version',
|
||||
autospec=False)
|
||||
def test__http_request_client_fallback_success(self, mock_negotiate):
|
||||
# Test when fallback to a supported version succeeds
|
||||
mock_negotiate.return_value = '1.6'
|
||||
error_body = _get_error_body()
|
||||
bad_resp = utils.mockSessionResponse(
|
||||
{'X-OpenStack-Ironic-API-Minimum-Version': '1.1',
|
||||
'X-OpenStack-Ironic-API-Maximum-Version': '1.6',
|
||||
'content-type': 'text/plain',
|
||||
},
|
||||
error_body,
|
||||
version=1,
|
||||
status_code=http_client.NOT_ACCEPTABLE)
|
||||
good_resp = utils.mockSessionResponse(
|
||||
{'X-OpenStack-Ironic-API-Minimum-Version': '1.1',
|
||||
'X-OpenStack-Ironic-API-Maximum-Version': '1.6',
|
||||
'content-type': 'text/plain',
|
||||
},
|
||||
"We got some text",
|
||||
version=1,
|
||||
status_code=http_client.OK)
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
|
||||
with mock.patch.object(client, 'session',
|
||||
autospec=True) as mock_session:
|
||||
|
||||
mock_session.request.side_effect = iter([bad_resp, good_resp])
|
||||
response, body_iter = client._http_request('/v1/resources', 'GET')
|
||||
|
||||
self.assertEqual(http_client.OK, response.status_code)
|
||||
self.assertEqual(1, mock_negotiate.call_count)
|
||||
|
||||
@mock.patch.object(http.LOG, 'debug', autospec=True)
|
||||
def test_log_curl_request_mask_password(self, mock_log):
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
kwargs = {'headers': {'foo-header': 'bar-header'},
|
||||
'body': '{"password": "foo"}'}
|
||||
client.log_curl_request('foo', 'http://127.0.0.1', kwargs)
|
||||
expected_log = ("curl -i -X foo -H 'foo-header: bar-header' "
|
||||
"-d '{\"password\": \"***\"}' http://127.0.0.1")
|
||||
mock_log.assert_called_once_with(expected_log)
|
||||
|
||||
@mock.patch.object(http.LOG, 'debug', autospec=True)
|
||||
def test_log_http_response_mask_password(self, mock_log):
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
fake_response = utils.FakeResponse({}, version=1, reason='foo',
|
||||
status=200)
|
||||
body = '{"password": "foo"}'
|
||||
client.log_http_response(fake_response, body=body)
|
||||
expected_log = ("\nHTTP/0.1 200 foo\n\n{\"password\": \"***\"}\n")
|
||||
mock_log.assert_called_once_with(expected_log)
|
||||
|
||||
def test__https_init_ssl_args_insecure(self):
|
||||
client = http.HTTPClient('https://localhost/', insecure=True)
|
||||
|
||||
self.assertEqual(False, client.session.verify)
|
||||
|
||||
def test__https_init_ssl_args_secure(self):
|
||||
client = http.HTTPClient('https://localhost/', ca_file='test_ca',
|
||||
key_file='test_key', cert_file='test_cert')
|
||||
|
||||
self.assertEqual('test_ca', client.session.verify)
|
||||
self.assertEqual(('test_cert', 'test_key'), client.session.cert)
|
||||
|
||||
@mock.patch.object(http.LOG, 'debug', autospec=True)
|
||||
def test_log_curl_request_with_body_and_header(self, mock_log):
|
||||
client = http.HTTPClient('http://test')
|
||||
headers = {'header1': 'value1'}
|
||||
body = 'example body'
|
||||
|
||||
client.log_curl_request('GET', '/v1/nodes',
|
||||
{'headers': headers, 'body': body})
|
||||
|
||||
self.assertTrue(mock_log.called)
|
||||
self.assertTrue(mock_log.call_args[0])
|
||||
self.assertEqual("curl -i -X GET -H 'header1: value1'"
|
||||
" -d 'example body' http://test/v1/nodes",
|
||||
mock_log.call_args[0][0])
|
||||
|
||||
@mock.patch.object(http.LOG, 'debug', autospec=True)
|
||||
def test_log_curl_request_with_certs(self, mock_log):
|
||||
headers = {'header1': 'value1'}
|
||||
client = http.HTTPClient('https://test', key_file='key',
|
||||
cert_file='cert', cacert='cacert',
|
||||
token='fake-token')
|
||||
|
||||
client.log_curl_request('GET', '/v1/test', {'headers': headers})
|
||||
|
||||
self.assertTrue(mock_log.called)
|
||||
self.assertTrue(mock_log.call_args[0])
|
||||
|
||||
self.assertEqual("curl -i -X GET -H 'header1: value1' "
|
||||
"--cert cert --key key https://test/v1/test",
|
||||
mock_log.call_args[0][0])
|
||||
|
||||
@mock.patch.object(http.LOG, 'debug', autospec=True)
|
||||
def test_log_curl_request_with_insecure_param(self, mock_log):
|
||||
headers = {'header1': 'value1'}
|
||||
http_client_object = http.HTTPClient('https://test', insecure=True,
|
||||
token='fake-token')
|
||||
|
||||
http_client_object.log_curl_request('GET', '/v1/test',
|
||||
{'headers': headers})
|
||||
|
||||
self.assertTrue(mock_log.called)
|
||||
self.assertTrue(mock_log.call_args[0])
|
||||
self.assertEqual("curl -i -X GET -H 'header1: value1' -k "
|
||||
"--cert None --key None https://test/v1/test",
|
||||
mock_log.call_args[0][0])
|
||||
|
||||
|
||||
class SessionClientTest(utils.BaseTestCase):
|
||||
|
||||
def test_server_exception_empty_body(self):
|
||||
error_body = _get_error_body()
|
||||
|
||||
fake_session = utils.mockSession({'Content-Type': 'application/json'},
|
||||
error_body,
|
||||
http_client.INTERNAL_SERVER_ERROR)
|
||||
|
||||
client = _session_client(session=fake_session)
|
||||
|
||||
self.assertRaises(exc.InternalServerError,
|
||||
client.json_request,
|
||||
'GET', '/v1/resources')
|
||||
|
||||
def test_server_exception_description_only(self):
|
||||
error_msg = 'test error msg'
|
||||
error_body = _get_error_body(description=error_msg)
|
||||
fake_session = utils.mockSession(
|
||||
{'Content-Type': 'application/json'},
|
||||
error_body, status_code=http_client.BAD_REQUEST)
|
||||
client = _session_client(session=fake_session)
|
||||
|
||||
self.assertRaisesRegex(exc.BadRequest, 'test error msg',
|
||||
client.json_request,
|
||||
'GET', '/v1/resources')
|
||||
|
||||
def test__parse_version_headers(self):
|
||||
# Test parsing of version headers from SessionClient
|
||||
fake_session = utils.mockSession(
|
||||
{'X-OpenStack-Ironic-API-Minimum-Version': '1.1',
|
||||
'X-OpenStack-Ironic-API-Maximum-Version': '1.6',
|
||||
'content-type': 'text/plain',
|
||||
},
|
||||
None,
|
||||
http_client.HTTP_VERSION_NOT_SUPPORTED)
|
||||
expected_result = ('1.1', '1.6')
|
||||
client = _session_client(session=fake_session)
|
||||
result = client._parse_version_headers(fake_session.request())
|
||||
self.assertEqual(expected_result, result)
|
||||
|
||||
def _test_endpoint_override(self, endpoint):
|
||||
fake_session = utils.mockSession({'content-type': 'application/json'},
|
||||
status_code=http_client.NO_CONTENT)
|
||||
request_mock = mock.Mock()
|
||||
fake_session.request = request_mock
|
||||
request_mock.return_value = utils.mockSessionResponse(
|
||||
headers={'content-type': 'application/json'},
|
||||
status_code=http_client.NO_CONTENT)
|
||||
client = _session_client(session=fake_session,
|
||||
endpoint_override=endpoint)
|
||||
client.json_request('DELETE', '/v1/nodes/aa/maintenance')
|
||||
expected_args_dict = {
|
||||
'headers': {
|
||||
'Content-Type': 'application/json',
|
||||
'Accept': 'application/json',
|
||||
'X-OpenStack-Ironic-API-Version': '1.6'
|
||||
},
|
||||
'auth': None, 'user_agent': 'python-ironicclient',
|
||||
'endpoint_filter': {
|
||||
'interface': 'publicURL',
|
||||
'service_type': 'baremetal',
|
||||
'region_name': ''
|
||||
}
|
||||
}
|
||||
if isinstance(endpoint, six.string_types):
|
||||
trimmed = http._trim_endpoint_api_version(endpoint)
|
||||
expected_args_dict['endpoint_override'] = trimmed
|
||||
request_mock.assert_called_once_with(
|
||||
'/v1/nodes/aa/maintenance', 'DELETE', raise_exc=False,
|
||||
**expected_args_dict
|
||||
)
|
||||
|
||||
def test_endpoint_override(self):
|
||||
self._test_endpoint_override('http://1.0.0.1:6385')
|
||||
|
||||
def test_endpoint_override_with_version(self):
|
||||
self._test_endpoint_override('http://1.0.0.1:6385/v1')
|
||||
|
||||
def test_endpoint_override_not_valid(self):
|
||||
self._test_endpoint_override(True)
|
||||
|
||||
|
||||
@mock.patch.object(time, 'sleep', lambda *_: None)
|
||||
class RetriesTestCase(utils.BaseTestCase):
|
||||
|
||||
def test_http_no_retry(self):
|
||||
error_body = _get_error_body()
|
||||
bad_resp = utils.mockSessionResponse(
|
||||
{'Content-Type': 'text/plain'},
|
||||
error_body,
|
||||
version=1,
|
||||
status_code=http_client.CONFLICT)
|
||||
client = http.HTTPClient('http://localhost/', max_retries=0)
|
||||
|
||||
with mock.patch.object(client.session, 'request', autospec=True,
|
||||
return_value=bad_resp) as mock_request:
|
||||
|
||||
self.assertRaises(exc.Conflict, client._http_request,
|
||||
'/v1/resources', 'GET')
|
||||
self.assertEqual(1, mock_request.call_count)
|
||||
|
||||
def test_http_retry(self):
|
||||
error_body = _get_error_body()
|
||||
bad_resp = utils.mockSessionResponse(
|
||||
{'Content-Type': 'text/plain'},
|
||||
error_body,
|
||||
version=1,
|
||||
status_code=http_client.CONFLICT)
|
||||
good_resp = utils.mockSessionResponse(
|
||||
{'Content-Type': 'text/plain'},
|
||||
"meow",
|
||||
version=1,
|
||||
status_code=http_client.OK)
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
|
||||
with mock.patch.object(client, 'session',
|
||||
autospec=True) as mock_session:
|
||||
|
||||
mock_session.request.side_effect = iter([bad_resp, good_resp])
|
||||
response, body_iter = client._http_request('/v1/resources', 'GET')
|
||||
|
||||
self.assertEqual(http_client.OK, response.status_code)
|
||||
self.assertEqual(2, mock_session.request.call_count)
|
||||
|
||||
def test_http_retry_503(self):
|
||||
error_body = _get_error_body()
|
||||
bad_resp = utils.mockSessionResponse(
|
||||
{'Content-Type': 'text/plain'},
|
||||
error_body,
|
||||
version=1,
|
||||
status_code=http_client.SERVICE_UNAVAILABLE)
|
||||
good_resp = utils.mockSessionResponse(
|
||||
{'Content-Type': 'text/plain'},
|
||||
"meow",
|
||||
version=1,
|
||||
status_code=http_client.OK)
|
||||
client = http.HTTPClient('http://localhost/')
|
||||
|
||||
with mock.patch.object(client, 'session',
|
||||
autospec=True) as mock_session:
|
||||
mock_session.request.side_effect = iter([bad_resp, good_resp])
|
||||
response, body_iter = client._http_request('/v1/resources', 'GET')
|
||||
|
||||
self.assertEqual(http_client.OK, response.status_code)
|
||||
self.assertEqual(2, mock_session.request.call_count)
|
||||
|
||||
def test_http_retry_connection_refused(self):
    """A ConnectionRefused error triggers a retry that can succeed."""
    ok_resp = utils.mockSessionResponse(
        {'content-type': 'text/plain'}, "meow", version=1,
        status_code=http_client.OK)
    client = http.HTTPClient('http://localhost/')

    with mock.patch.object(client, 'session',
                           autospec=True) as session_mock:
        session_mock.request.side_effect = [exc.ConnectionRefused(),
                                            ok_resp]
        response, body_iter = client._http_request('/v1/resources', 'GET')

    self.assertEqual(http_client.OK, response.status_code)
    self.assertEqual(2, session_mock.request.call_count)
def test_http_failed_retry(self):
    """Persistent CONFLICT exhausts the default retry budget and raises."""
    conflict_resp = utils.mockSessionResponse(
        {'content-type': 'text/plain'}, _get_error_body(), version=1,
        status_code=http_client.CONFLICT)
    client = http.HTTPClient('http://localhost/')

    with mock.patch.object(client, 'session',
                           autospec=True) as session_mock:
        session_mock.request.return_value = conflict_resp
        self.assertRaises(exc.Conflict, client._http_request,
                          '/v1/resources', 'GET')
        # One initial attempt plus DEFAULT_MAX_RETRIES retries.
        self.assertEqual(http.DEFAULT_MAX_RETRIES + 1,
                         session_mock.request.call_count)
def test_http_max_retries_none(self):
    """max_retries=None falls back to the default retry budget."""
    conflict_resp = utils.mockSessionResponse(
        {'content-type': 'text/plain'}, _get_error_body(), version=1,
        status_code=http_client.CONFLICT)
    client = http.HTTPClient('http://localhost/', max_retries=None)

    with mock.patch.object(client, 'session',
                           autospec=True) as session_mock:
        session_mock.request.return_value = conflict_resp
        self.assertRaises(exc.Conflict, client._http_request,
                          '/v1/resources', 'GET')
        # None means "use the default", not "retry forever".
        self.assertEqual(http.DEFAULT_MAX_RETRIES + 1,
                         session_mock.request.call_count)
def test_http_change_max_retries(self):
    """An explicit max_retries overrides the default retry budget."""
    conflict_resp = utils.mockSessionResponse(
        {'content-type': 'text/plain'}, _get_error_body(), version=1,
        status_code=http_client.CONFLICT)
    client = http.HTTPClient('http://localhost/',
                             max_retries=http.DEFAULT_MAX_RETRIES + 1)

    with mock.patch.object(client, 'session',
                           autospec=True) as session_mock:
        session_mock.request.return_value = conflict_resp
        self.assertRaises(exc.Conflict, client._http_request,
                          '/v1/resources', 'GET')
        # Initial attempt + (DEFAULT_MAX_RETRIES + 1) retries.
        self.assertEqual(http.DEFAULT_MAX_RETRIES + 2,
                         session_mock.request.call_count)
def test_session_retry(self):
    """SessionClient retries a CONFLICT response and then succeeds."""
    conflict_resp = utils.mockSessionResponse(
        {'Content-Type': 'application/json'}, _get_error_body(),
        http_client.CONFLICT)
    ok_resp = utils.mockSessionResponse(
        {'Content-Type': 'application/json'}, b"OK",
        http_client.OK)
    fake_session = mock.Mock(spec=requests.Session)
    fake_session.request.side_effect = [conflict_resp, ok_resp]

    client = _session_client(session=fake_session)
    client.json_request('GET', '/v1/resources')

    self.assertEqual(2, fake_session.request.call_count)
def test_session_retry_503(self):
    """SessionClient retries a 503 response and then succeeds."""
    unavailable_resp = utils.mockSessionResponse(
        {'Content-Type': 'application/json'}, _get_error_body(),
        http_client.SERVICE_UNAVAILABLE)
    ok_resp = utils.mockSessionResponse(
        {'Content-Type': 'application/json'}, b"OK",
        http_client.OK)
    fake_session = mock.Mock(spec=requests.Session)
    fake_session.request.side_effect = [unavailable_resp, ok_resp]

    client = _session_client(session=fake_session)
    client.json_request('GET', '/v1/resources')

    self.assertEqual(2, fake_session.request.call_count)
def test_session_retry_connection_refused(self):
    """SessionClient retries after a ConnectionRefused error."""
    ok_resp = utils.mockSessionResponse(
        {'Content-Type': 'application/json'}, b"OK",
        http_client.OK)
    fake_session = mock.Mock(spec=requests.Session)
    fake_session.request.side_effect = [exc.ConnectionRefused(),
                                        ok_resp]

    client = _session_client(session=fake_session)
    client.json_request('GET', '/v1/resources')

    self.assertEqual(2, fake_session.request.call_count)
def test_session_retry_retriable_connection_failure(self):
    """A keystoneauth RetriableConnectionFailure is retried as well."""
    ok_resp = utils.mockSessionResponse(
        {'Content-Type': 'application/json'}, b"OK",
        http_client.OK)
    fake_session = mock.Mock(spec=requests.Session)
    fake_session.request.side_effect = [
        kexc.RetriableConnectionFailure(), ok_resp]

    client = _session_client(session=fake_session)
    client.json_request('GET', '/v1/resources')

    self.assertEqual(2, fake_session.request.call_count)
def test_session_retry_fail(self):
    """Persistent CONFLICT through a session exhausts retries and raises."""
    conflict_resp = utils.mockSessionResponse(
        {'Content-Type': 'application/json'}, _get_error_body(),
        http_client.CONFLICT)
    fake_session = mock.Mock(spec=requests.Session)
    fake_session.request.return_value = conflict_resp

    client = _session_client(session=fake_session)

    self.assertRaises(exc.Conflict, client.json_request,
                      'GET', '/v1/resources')
    # One initial attempt plus DEFAULT_MAX_RETRIES retries.
    self.assertEqual(http.DEFAULT_MAX_RETRIES + 1,
                     fake_session.request.call_count)
def test_session_max_retries_none(self):
    """conflict_max_retries=None falls back to the default budget."""
    conflict_resp = utils.mockSessionResponse(
        {'Content-Type': 'application/json'}, _get_error_body(),
        http_client.CONFLICT)
    fake_session = mock.Mock(spec=requests.Session)
    fake_session.request.return_value = conflict_resp

    client = _session_client(session=fake_session)
    client.conflict_max_retries = None

    self.assertRaises(exc.Conflict, client.json_request,
                      'GET', '/v1/resources')
    # None means "use the default", not "retry forever".
    self.assertEqual(http.DEFAULT_MAX_RETRIES + 1,
                     fake_session.request.call_count)
def test_session_change_max_retries(self):
    """An explicit conflict_max_retries overrides the default budget."""
    conflict_resp = utils.mockSessionResponse(
        {'Content-Type': 'application/json'}, _get_error_body(),
        http_client.CONFLICT)
    fake_session = mock.Mock(spec=requests.Session)
    fake_session.request.return_value = conflict_resp

    client = _session_client(session=fake_session)
    client.conflict_max_retries = http.DEFAULT_MAX_RETRIES + 1

    self.assertRaises(exc.Conflict, client.json_request,
                      'GET', '/v1/resources')
    # Initial attempt + (DEFAULT_MAX_RETRIES + 1) retries.
    self.assertEqual(http.DEFAULT_MAX_RETRIES + 2,
                     fake_session.request.call_count)
@ -1,371 +0,0 @@
|
||||
# Copyright 2013 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
import mock
|
||||
import six.moves.builtins as __builtin__
|
||||
|
||||
from ironicclient.common import utils
|
||||
from ironicclient import exc
|
||||
from ironicclient.tests.unit import utils as test_utils
|
||||
|
||||
|
||||
class UtilsTest(test_utils.BaseTestCase):
    """Unit tests for helpers in ironicclient.common.utils."""

    def test_key_value_pairs_to_dict(self):
        """'k=v' strings are parsed and JSON values are deserialized."""
        kv_list = ['str=foo', 'int=1', 'bool=true',
                   'list=[1, 2, 3]', 'dict={"foo": "bar"}']

        d = utils.key_value_pairs_to_dict(kv_list)
        self.assertEqual(
            {'str': 'foo', 'int': 1, 'bool': True,
             'list': [1, 2, 3], 'dict': {'foo': 'bar'}},
            d)

    def test_key_value_pairs_to_dict_nothing(self):
        """None and an empty list both yield an empty dict."""
        self.assertEqual({}, utils.key_value_pairs_to_dict(None))
        self.assertEqual({}, utils.key_value_pairs_to_dict([]))

    def test_args_array_to_dict(self):
        """Only the named key's list is converted; other keys pass through."""
        my_args = {
            'matching_metadata': ['str=foo', 'int=1', 'bool=true',
                                  'list=[1, 2, 3]', 'dict={"foo": "bar"}'],
            'other': 'value'
        }
        cleaned_dict = utils.args_array_to_dict(my_args,
                                                "matching_metadata")
        self.assertEqual({
            'matching_metadata': {'str': 'foo', 'int': 1, 'bool': True,
                                  'list': [1, 2, 3], 'dict': {'foo': 'bar'}},
            'other': 'value'
        }, cleaned_dict)

    def test_args_array_to_patch(self):
        """'add' attributes become JSON-patch ops with deserialized values."""
        my_args = {
            'attributes': ['str=foo', 'int=1', 'bool=true',
                           'list=[1, 2, 3]', 'dict={"foo": "bar"}'],
            'op': 'add',
        }
        patch = utils.args_array_to_patch(my_args['op'],
                                          my_args['attributes'])
        self.assertEqual([{'op': 'add', 'value': 'foo', 'path': '/str'},
                          {'op': 'add', 'value': 1, 'path': '/int'},
                          {'op': 'add', 'value': True, 'path': '/bool'},
                          {'op': 'add', 'value': [1, 2, 3], 'path': '/list'},
                          {'op': 'add', 'value': {"foo": "bar"},
                           'path': '/dict'}], patch)

    def test_args_array_to_patch_format_error(self):
        """An attribute with no '=' separator raises CommandError."""
        my_args = {
            'attributes': ['foobar'],
            'op': 'add',
        }
        self.assertRaises(exc.CommandError, utils.args_array_to_patch,
                          my_args['op'], my_args['attributes'])

    def test_args_array_to_patch_remove(self):
        """'remove' attributes need only a path; a leading '/' is added."""
        my_args = {
            'attributes': ['/foo', 'extra/bar'],
            'op': 'remove',
        }
        patch = utils.args_array_to_patch(my_args['op'],
                                          my_args['attributes'])
        self.assertEqual([{'op': 'remove', 'path': '/foo'},
                          {'op': 'remove', 'path': '/extra/bar'}], patch)

    def test_split_and_deserialize(self):
        """Values after '=' are JSON-decoded when possible, kept as str otherwise."""
        ret = utils.split_and_deserialize('str=foo')
        self.assertEqual(('str', 'foo'), ret)

        ret = utils.split_and_deserialize('int=1')
        self.assertEqual(('int', 1), ret)

        ret = utils.split_and_deserialize('bool=false')
        self.assertEqual(('bool', False), ret)

        ret = utils.split_and_deserialize('list=[1, "foo", 2]')
        self.assertEqual(('list', [1, "foo", 2]), ret)

        ret = utils.split_and_deserialize('dict={"foo": 1}')
        self.assertEqual(('dict', {"foo": 1}), ret)

        # A quoted number stays a string after JSON decoding.
        ret = utils.split_and_deserialize('str_int="1"')
        self.assertEqual(('str_int', "1"), ret)

    def test_split_and_deserialize_fail(self):
        """Missing '=' separator raises CommandError."""
        self.assertRaises(exc.CommandError,
                          utils.split_and_deserialize, 'foo:bar')

    def test_bool_arg_value(self):
        """bool_argument_value coerces common truthy/falsy spellings."""
        self.assertTrue(utils.bool_argument_value('arg', 'y', strict=True))
        self.assertTrue(utils.bool_argument_value('arg', 'TrUe', strict=True))
        self.assertTrue(utils.bool_argument_value('arg', '1', strict=True))
        self.assertTrue(utils.bool_argument_value('arg', 1, strict=True))

        self.assertFalse(utils.bool_argument_value('arg', '0', strict=True))
        self.assertFalse(utils.bool_argument_value('arg', 'No', strict=True))

        # Unrecognized values raise in strict mode...
        self.assertRaises(exc.CommandError,
                          utils.bool_argument_value, 'arg', 'ee', strict=True)

        # ...but fall back to the default in non-strict mode.
        self.assertFalse(utils.bool_argument_value('arg', 'ee', strict=False))
        self.assertTrue(utils.bool_argument_value('arg', 'ee', strict=False,
                                                  default=True))
        # No check that default is a Boolean...
        self.assertEqual('foo', utils.bool_argument_value('arg', 'ee',
                                                          strict=False, default='foo'))

    def test_check_for_invalid_fields(self):
        """Valid subsets pass silently; unknown fields raise CommandError."""
        self.assertIsNone(utils.check_for_invalid_fields(
            ['a', 'b'], ['a', 'b', 'c']))
        # 'd' is not a valid field
        self.assertRaises(exc.CommandError, utils.check_for_invalid_fields,
                          ['a', 'd'], ['a', 'b', 'c'])

    def test_convert_list_props_to_comma_separated_strings(self):
        """List-valued properties become comma-separated strings."""
        data = {'prop1': 'val1',
                'prop2': ['item1', 'item2', 'item3']}
        result = utils.convert_list_props_to_comma_separated(data)
        self.assertEqual('val1', result['prop1'])
        self.assertEqual('item1, item2, item3', result['prop2'])

    def test_convert_list_props_to_comma_separated_mix(self):
        """Non-string list items are stringified during joining."""
        data = {'prop1': 'val1',
                'prop2': [1, 2.5, 'item3']}
        result = utils.convert_list_props_to_comma_separated(data)
        self.assertEqual('val1', result['prop1'])
        self.assertEqual('1, 2.5, item3', result['prop2'])

    def test_convert_list_props_to_comma_separated_partial(self):
        """Only properties named in ``props`` are converted."""
        data = {'prop1': [1, 2, 3],
                'prop2': [1, 2.5, 'item3']}
        result = utils.convert_list_props_to_comma_separated(
            data, props=['prop2'])
        self.assertEqual([1, 2, 3], result['prop1'])
        self.assertEqual('1, 2.5, item3', result['prop2'])
class CommonParamsForListTest(test_utils.BaseTestCase):
    """Unit tests for utils.common_params_for_list().

    Each test mutates the shared ``self.args`` mock and/or
    ``self.expected_params`` fixture created in setUp().
    """

    def setUp(self):
        super(CommonParamsForListTest, self).setUp()
        # A CLI-namespace stand-in with every list option unset.
        self.args = mock.Mock(marker=None, limit=None, sort_key=None,
                              sort_dir=None, detail=False, fields=None,
                              spec=True)
        self.expected_params = {'detail': False}

    def test_nothing_set(self):
        """With no options set, only 'detail' appears in the params."""
        self.assertEqual(self.expected_params,
                         utils.common_params_for_list(self.args, [], []))

    def test_marker_and_limit(self):
        """marker and limit are passed through unchanged."""
        self.args.marker = 'foo'
        self.args.limit = 42
        self.expected_params.update({'marker': 'foo', 'limit': 42})
        self.assertEqual(self.expected_params,
                         utils.common_params_for_list(self.args, [], []))

    def test_invalid_limit(self):
        """A negative limit raises CommandError."""
        self.args.limit = -42
        self.assertRaises(exc.CommandError,
                          utils.common_params_for_list,
                          self.args, [], [])

    def test_sort_key_and_sort_dir(self):
        """A valid sort_key/sort_dir pair is passed through."""
        self.args.sort_key = 'field'
        self.args.sort_dir = 'desc'
        self.expected_params.update({'sort_key': 'field', 'sort_dir': 'desc'})
        self.assertEqual(self.expected_params,
                         utils.common_params_for_list(self.args,
                                                      ['field'],
                                                      []))

    def test_sort_key_allows_label(self):
        """A display label is translated to its underlying field name."""
        self.args.sort_key = 'Label'
        self.expected_params.update({'sort_key': 'field'})
        self.assertEqual(self.expected_params,
                         utils.common_params_for_list(self.args,
                                                      ['field', 'field2'],
                                                      ['Label', 'Label2']))

    def test_sort_key_invalid(self):
        """A sort_key not in the field list raises CommandError."""
        self.args.sort_key = 'something'
        self.assertRaises(exc.CommandError,
                          utils.common_params_for_list,
                          self.args,
                          ['field', 'field2'],
                          [])

    def test_sort_dir_invalid(self):
        """A sort_dir other than asc/desc raises CommandError."""
        self.args.sort_dir = 'something'
        self.assertRaises(exc.CommandError,
                          utils.common_params_for_list,
                          self.args,
                          [],
                          [])

    def test_detail(self):
        """detail=True is reflected in the params."""
        self.args.detail = True
        self.expected_params['detail'] = True
        self.assertEqual(self.expected_params,
                         utils.common_params_for_list(self.args, [], []))

    def test_fields(self):
        """The nested fields list is flattened into the params."""
        self.args.fields = [['a', 'b', 'c']]
        self.expected_params.update({'fields': ['a', 'b', 'c']})
        self.assertEqual(self.expected_params,
                         utils.common_params_for_list(self.args, [], []))
class CommonFiltersTest(test_utils.BaseTestCase):
    """Unit tests for utils.common_filters()."""

    def test_limit(self):
        """A positive limit produces a 'limit=N' filter string."""
        self.assertEqual(['limit=42'], utils.common_filters(limit=42))

    def test_limit_0(self):
        """A zero limit produces no filter at all."""
        self.assertEqual([], utils.common_filters(limit=0))

    def test_other(self):
        """marker/sort_key/sort_dir each map to a 'name=value' string."""
        for filter_name in ('marker', 'sort_key', 'sort_dir'):
            filters = utils.common_filters(**{filter_name: 'test'})
            self.assertEqual(['%s=test' % filter_name], filters)

    def test_fields(self):
        """A fields list is joined with commas into one filter string."""
        self.assertEqual(['fields=a,b,c'],
                         utils.common_filters(fields=['a', 'b', 'c']))
@mock.patch.object(subprocess, 'Popen', autospec=True)
class MakeConfigDriveTest(test_utils.BaseTestCase):
    """Unit tests for utils.make_configdrive().

    The class-level patch injects ``mock_popen`` as the LAST positional
    argument of every test method.
    """

    def setUp(self):
        super(MakeConfigDriveTest, self).setUp()
        # expected genisoimage cmd
        self.genisoimage_cmd = ['genisoimage', '-o', mock.ANY,
                                '-ldots', '-allow-lowercase',
                                '-allow-multidot', '-l',
                                '-publisher', 'ironicclient-configdrive 0.1',
                                '-quiet', '-J', '-r', '-V',
                                'config-2', mock.ANY]

    def test_make_configdrive(self, mock_popen):
        """Happy path: genisoimage is invoked once with piped stdio."""
        fake_process = mock.Mock(returncode=0)
        fake_process.communicate.return_value = ('', '')
        mock_popen.return_value = fake_process

        with utils.tempdir() as dirname:
            utils.make_configdrive(dirname)

            mock_popen.assert_called_once_with(self.genisoimage_cmd,
                                               stderr=subprocess.PIPE,
                                               stdout=subprocess.PIPE)
            fake_process.communicate.assert_called_once_with()

    @mock.patch.object(os, 'access', autospec=True)
    def test_make_configdrive_non_readable_dir(self, mock_access, mock_popen):
        """An unreadable directory fails before genisoimage is launched."""
        mock_access.return_value = False
        self.assertRaises(exc.CommandError, utils.make_configdrive, 'fake-dir')
        mock_access.assert_called_once_with('fake-dir', os.R_OK)
        self.assertFalse(mock_popen.called)

    @mock.patch.object(os, 'access', autospec=True)
    def test_make_configdrive_oserror(self, mock_access, mock_popen):
        """An OSError from Popen is wrapped in CommandError."""
        mock_access.return_value = True
        mock_popen.side_effect = OSError('boom')

        self.assertRaises(exc.CommandError, utils.make_configdrive, 'fake-dir')
        mock_access.assert_called_once_with('fake-dir', os.R_OK)
        mock_popen.assert_called_once_with(self.genisoimage_cmd,
                                           stderr=subprocess.PIPE,
                                           stdout=subprocess.PIPE)

    @mock.patch.object(os, 'access', autospec=True)
    def test_make_configdrive_non_zero_returncode(self, mock_access,
                                                  mock_popen):
        """A non-zero genisoimage exit code is reported as CommandError."""
        fake_process = mock.Mock(returncode=123)
        fake_process.communicate.return_value = ('', '')
        mock_popen.return_value = fake_process

        self.assertRaises(exc.CommandError, utils.make_configdrive, 'fake-dir')
        mock_access.assert_called_once_with('fake-dir', os.R_OK)
        mock_popen.assert_called_once_with(self.genisoimage_cmd,
                                           stderr=subprocess.PIPE,
                                           stdout=subprocess.PIPE)
        fake_process.communicate.assert_called_once_with()
class GetFromStdinTest(test_utils.BaseTestCase):
    """Unit tests for utils.get_from_stdin()."""

    @mock.patch.object(sys, 'stdin', autospec=True)
    def test_get_from_stdin(self, mock_stdin):
        """stdin content is read once and returned verbatim."""
        contents = '[{"step": "upgrade", "interface": "deploy"}]'
        mock_stdin.read.return_value = contents

        result = utils.get_from_stdin('something')

        self.assertEqual(result, contents)
        mock_stdin.read.assert_called_once_with()

    @mock.patch.object(sys, 'stdin', autospec=True)
    def test_get_from_stdin_fail(self, mock_stdin):
        """An IOError while reading stdin becomes InvalidAttribute."""
        mock_stdin.read.side_effect = IOError

        self.assertRaises(exc.InvalidAttribute,
                          utils.get_from_stdin, 'something')
        mock_stdin.read.assert_called_once_with()
class HandleJsonFileTest(test_utils.BaseTestCase):
    """Unit tests for utils.handle_json_or_file_arg()."""

    def test_handle_json_or_file_arg(self):
        """An inline JSON string is decoded directly."""
        cleansteps = '[{"step": "upgrade", "interface": "deploy"}]'
        steps = utils.handle_json_or_file_arg(cleansteps)
        self.assertEqual(json.loads(cleansteps), steps)

    def test_handle_json_or_file_arg_bad_json(self):
        """A string that is neither JSON nor a file raises InvalidAttribute."""
        cleansteps = 'foo'
        self.assertRaisesRegex(exc.InvalidAttribute,
                               'For JSON',
                               utils.handle_json_or_file_arg, cleansteps)

    def test_handle_json_or_file_arg_file(self):
        """A path to a JSON file is read and decoded."""
        contents = '[{"step": "upgrade", "interface": "deploy"}]'

        with tempfile.NamedTemporaryFile(mode='w') as f:
            f.write(contents)
            f.flush()
            steps = utils.handle_json_or_file_arg(f.name)

        self.assertEqual(json.loads(contents), steps)

    @mock.patch.object(__builtin__, 'open', autospec=True)
    def test_handle_json_or_file_arg_file_fail(self, mock_open):
        """An IOError while reading the file becomes InvalidAttribute."""
        # Build a context manager whose read() raises, to stand in for
        # the real file object returned by the patched open().
        mock_file_object = mock.MagicMock()
        mock_file_handle = mock.MagicMock()
        mock_file_handle.__enter__.return_value = mock_file_object
        mock_open.return_value = mock_file_handle
        mock_file_object.read.side_effect = IOError

        # A real temp file is still needed so the path exists on disk.
        with tempfile.NamedTemporaryFile(mode='w') as f:
            self.assertRaisesRegex(exc.InvalidAttribute,
                                   "from file",
                                   utils.handle_json_or_file_arg, f.name)
            mock_open.assert_called_once_with(f.name, 'r')
            mock_file_object.read.assert_called_once_with()
@ -1,60 +0,0 @@
|
||||
#
|
||||
# Copyright 2015 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
# Dummy keystone credentials shared by the osc unit-test fakes.
AUTH_TOKEN = "foobar"
AUTH_URL = "http://0.0.0.0"
class FakeApp(object):
    """Minimal stand-in for an osc application object used in unit tests.

    Exposes the attributes command code reads from a real application:
    ``client_manager``, the three standard streams, and ``restapi``.
    """

    def __init__(self):
        self.client_manager = None
        self.stdin = sys.stdin
        # The original routed this through a local ``_stdout`` that was
        # always None (dead code), so the effective value is always
        # sys.stdout; assign it directly.
        self.stdout = sys.stdout
        self.stderr = sys.stderr
        self.restapi = None
class FakeClientManager(object):
    """Stand-in for the osc ClientManager consumed by the baremetal plugin.

    Provides just the attributes plugin.make_client() reads: a fake
    session, region/interface settings, and a pinned API version.
    """

    def __init__(self):
        # Identity/auth plumbing is unused by the tests.
        self.identity = None
        self.auth_ref = None
        # Endpoint-selection knobs.
        self.interface = 'public'
        self._region_name = 'RegionOne'
        # A sentinel string; the tests never use it as a real session.
        self.session = 'fake session'
        # Pinned microversion for the baremetal service.
        self._api_version = {'baremetal': '1.6'}
class FakeResource(object):
    """Generic fake API resource exposing its ``info`` keys as attributes."""

    def __init__(self, manager, info, loaded=False):
        self.__name__ = type(self).__name__
        self.manager = manager
        self._info = info
        self._add_details(info)
        self._loaded = loaded

    def _add_details(self, info):
        # Promote every key of the info dict to an instance attribute.
        for key, value in info.items():
            setattr(self, key, value)

    def __repr__(self):
        # Show only public, non-manager attributes, sorted for stability.
        public_keys = sorted(key for key in self.__dict__.keys()
                             if key[0] != '_' and key != 'manager')
        details = ", ".join("%s=%s" % (key, getattr(self, key))
                            for key in public_keys)
        return "<%s %s>" % (self.__class__.__name__, details)
@ -1,110 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import argparse
|
||||
|
||||
import mock
|
||||
import testtools
|
||||
|
||||
from ironicclient.common import http
|
||||
from ironicclient.osc import plugin
|
||||
from ironicclient.tests.unit.osc import fakes
|
||||
from ironicclient.v1 import client
|
||||
|
||||
|
||||
class MakeClientTest(testtools.TestCase):
    """Unit tests for plugin.make_client().

    NOTE: decorators apply bottom-up, so ``mock_client`` (the patched
    v1 Client) is the first injected argument and ``mock_warning`` the
    second; the OS_BAREMETAL_API_VERSION_SPECIFIED patch injects nothing.
    """

    @mock.patch.object(plugin, 'OS_BAREMETAL_API_VERSION_SPECIFIED', new=True)
    @mock.patch.object(plugin.LOG, 'warning', autospec=True)
    @mock.patch.object(client, 'Client', autospec=True)
    def test_make_client(self, mock_client, mock_warning):
        """An explicitly specified API version produces no warning."""
        instance = fakes.FakeClientManager()
        instance.get_endpoint_for_service_type = mock.Mock(
            return_value='endpoint')
        plugin.make_client(instance)
        mock_client.assert_called_once_with(os_ironic_api_version='1.6',
                                            session=instance.session,
                                            region_name=instance._region_name,
                                            endpoint='endpoint')
        self.assertFalse(mock_warning.called)
        instance.get_endpoint_for_service_type.assert_called_once_with(
            'baremetal', region_name=instance._region_name,
            interface=instance.interface)

    @mock.patch.object(plugin, 'OS_BAREMETAL_API_VERSION_SPECIFIED', new=False)
    @mock.patch.object(plugin.LOG, 'warning', autospec=True)
    @mock.patch.object(client, 'Client', autospec=True)
    def test_make_client_log_warning_no_version_specified(self, mock_client,
                                                          mock_warning):
        """Falling back to the default API version logs a warning."""
        instance = fakes.FakeClientManager()
        instance.get_endpoint_for_service_type = mock.Mock(
            return_value='endpoint')
        instance._api_version = {'baremetal': http.DEFAULT_VER}
        plugin.make_client(instance)
        mock_client.assert_called_once_with(
            os_ironic_api_version=http.DEFAULT_VER,
            session=instance.session,
            region_name=instance._region_name,
            endpoint='endpoint')
        self.assertTrue(mock_warning.called)
        instance.get_endpoint_for_service_type.assert_called_once_with(
            'baremetal', region_name=instance._region_name,
            interface=instance.interface)

    @mock.patch.object(plugin.utils, 'env', lambda x: '1.29')
    @mock.patch.object(plugin, 'OS_BAREMETAL_API_VERSION_SPECIFIED', new=False)
    @mock.patch.object(plugin.LOG, 'warning', autospec=True)
    @mock.patch.object(client, 'Client', autospec=True)
    def test_make_client_version_from_env_no_warning(self, mock_client,
                                                     mock_warning):
        """A version taken from the environment suppresses the warning."""
        instance = fakes.FakeClientManager()
        instance.get_endpoint_for_service_type = mock.Mock(
            return_value='endpoint')
        plugin.make_client(instance)
        self.assertFalse(mock_warning.called)
class BuildOptionParserTest(testtools.TestCase):
    """Unit tests for plugin.build_option_parser()."""

    @mock.patch.object(argparse.ArgumentParser, 'add_argument')
    def test_build_option_parser(self, mock_add_argument):
        """The API-version option is registered with every known choice."""
        parser = argparse.ArgumentParser()
        # Drop calls recorded while constructing the parser itself.
        mock_add_argument.reset_mock()
        plugin.build_option_parser(parser)

        expected_choices = ['1']
        expected_choices.extend(
            '1.%d' % minor
            for minor in range(1, plugin.LAST_KNOWN_API_VERSION + 1))
        expected_choices.append('latest')

        mock_add_argument.assert_called_once_with(
            '--os-baremetal-api-version', action=plugin.ReplaceLatestVersion,
            choices=expected_choices, default=http.DEFAULT_VER, help=mock.ANY,
            metavar='<baremetal-api-version>')
class ReplaceLatestVersionTest(testtools.TestCase):
    """Unit tests for the ReplaceLatestVersion argparse action."""

    def _parse_version(self, value):
        # Build a parser via the plugin and parse just the version flag.
        parser = argparse.ArgumentParser()
        plugin.build_option_parser(parser)
        namespace = argparse.Namespace()
        parser.parse_known_args(['--os-baremetal-api-version', value],
                                namespace)
        return namespace

    def test___call___latest(self):
        """'latest' is replaced by the last known 1.x microversion."""
        namespace = self._parse_version('latest')
        self.assertEqual('1.%d' % plugin.LAST_KNOWN_API_VERSION,
                         namespace.os_baremetal_api_version)
        self.assertTrue(plugin.OS_BAREMETAL_API_VERSION_SPECIFIED)

    def test___call___specific_version(self):
        """An explicit version is kept verbatim."""
        namespace = self._parse_version('1.4')
        self.assertEqual('1.4', namespace.os_baremetal_api_version)
        self.assertTrue(plugin.OS_BAREMETAL_API_VERSION_SPECIFIED)
@ -1,185 +0,0 @@
|
||||
#
|
||||
# Copyright 2015 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import mock
|
||||
from osc_lib.tests import utils
|
||||
|
||||
from ironicclient.tests.unit.osc import fakes
|
||||
|
||||
baremetal_chassis_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
|
||||
baremetal_chassis_uuid_empty = ''
|
||||
baremetal_properties_empty = ''
|
||||
baremetal_chassis_description = 'chassis description'
|
||||
baremetal_chassis_extra = {}
|
||||
BAREMETAL_CHASSIS = {
|
||||
'uuid': baremetal_chassis_uuid,
|
||||
'description': baremetal_chassis_description,
|
||||
'extra': baremetal_chassis_extra,
|
||||
}
|
||||
|
||||
baremetal_uuid = 'xxx-xxxxxx-xxxx'
|
||||
baremetal_name = 'fake name'
|
||||
baremetal_instance_uuid = 'yyy-yyyyyy-yyyy'
|
||||
baremetal_power_state = None
|
||||
baremetal_provision_state = None
|
||||
baremetal_maintenance = False
|
||||
|
||||
BAREMETAL = {
|
||||
'uuid': baremetal_uuid,
|
||||
'name': baremetal_name,
|
||||
'instance_uuid': baremetal_instance_uuid,
|
||||
'power_state': baremetal_power_state,
|
||||
'provision_state': baremetal_provision_state,
|
||||
'maintenance': baremetal_maintenance,
|
||||
'links': [],
|
||||
'volume': [],
|
||||
}
|
||||
|
||||
baremetal_port_uuid = 'zzz-zzzzzz-zzzz'
|
||||
baremetal_port_address = 'AA:BB:CC:DD:EE:FF'
|
||||
baremetal_port_extra = {'key1': 'value1',
|
||||
'key2': 'value2'}
|
||||
baremetal_port_physical_network = 'physnet1'
|
||||
|
||||
BAREMETAL_PORT = {
|
||||
'uuid': baremetal_port_uuid,
|
||||
'address': baremetal_port_address,
|
||||
'extra': baremetal_port_extra,
|
||||
'node_uuid': baremetal_uuid,
|
||||
}
|
||||
|
||||
baremetal_driver_hosts = ['fake-host1', 'fake-host2']
|
||||
baremetal_driver_name = 'fakedrivername'
|
||||
baremetal_driver_type = 'classic'
|
||||
baremetal_driver_default_boot_if = 'boot'
|
||||
baremetal_driver_default_console_if = 'console'
|
||||
baremetal_driver_default_deploy_if = 'deploy'
|
||||
baremetal_driver_default_inspect_if = 'inspect'
|
||||
baremetal_driver_default_management_if = 'management'
|
||||
baremetal_driver_default_network_if = 'network'
|
||||
baremetal_driver_default_power_if = 'power'
|
||||
baremetal_driver_default_raid_if = 'raid'
|
||||
baremetal_driver_default_storage_if = 'storage'
|
||||
baremetal_driver_default_vendor_if = 'vendor'
|
||||
baremetal_driver_enabled_boot_ifs = ['boot', 'boot2']
|
||||
baremetal_driver_enabled_console_ifs = ['console', 'console2']
|
||||
baremetal_driver_enabled_deploy_ifs = ['deploy', 'deploy2']
|
||||
baremetal_driver_enabled_inspect_ifs = ['inspect', 'inspect2']
|
||||
baremetal_driver_enabled_management_ifs = ['management', 'management2']
|
||||
baremetal_driver_enabled_network_ifs = ['network', 'network2']
|
||||
baremetal_driver_enabled_power_ifs = ['power', 'power2']
|
||||
baremetal_driver_enabled_raid_ifs = ['raid', 'raid2']
|
||||
baremetal_driver_enabled_storage_ifs = ['storage', 'storage2']
|
||||
baremetal_driver_enabled_vendor_ifs = ['vendor', 'vendor2']
|
||||
|
||||
BAREMETAL_DRIVER = {
|
||||
'hosts': baremetal_driver_hosts,
|
||||
'name': baremetal_driver_name,
|
||||
'type': baremetal_driver_type,
|
||||
'default_boot_interface': baremetal_driver_default_boot_if,
|
||||
'default_console_interface': baremetal_driver_default_console_if,
|
||||
'default_deploy_interface': baremetal_driver_default_deploy_if,
|
||||
'default_inspect_interface': baremetal_driver_default_inspect_if,
|
||||
'default_management_interface': baremetal_driver_default_management_if,
|
||||
'default_network_interface': baremetal_driver_default_network_if,
|
||||
'default_power_interface': baremetal_driver_default_power_if,
|
||||
'default_raid_interface': baremetal_driver_default_raid_if,
|
||||
'default_storage_interface': baremetal_driver_default_storage_if,
|
||||
'default_vendor_interface': baremetal_driver_default_vendor_if,
|
||||
'enabled_boot_interfaces': baremetal_driver_enabled_boot_ifs,
|
||||
'enabled_console_interfaces': baremetal_driver_enabled_console_ifs,
|
||||
'enabled_deploy_interfaces': baremetal_driver_enabled_deploy_ifs,
|
||||
'enabled_inspect_interfaces': baremetal_driver_enabled_inspect_ifs,
|
||||
'enabled_management_interfaces': baremetal_driver_enabled_management_ifs,
|
||||
'enabled_network_interfaces': baremetal_driver_enabled_network_ifs,
|
||||
'enabled_power_interfaces': baremetal_driver_enabled_power_ifs,
|
||||
'enabled_raid_interfaces': baremetal_driver_enabled_raid_ifs,
|
||||
'enabled_storage_interfaces': baremetal_driver_enabled_storage_ifs,
|
||||
'enabled_vendor_interfaces': baremetal_driver_enabled_vendor_ifs,
|
||||
}
|
||||
|
||||
baremetal_driver_passthru_method = 'lookup'
|
||||
|
||||
BAREMETAL_DRIVER_PASSTHRU = {"lookup": {"attach": "false",
|
||||
"http_methods": ["POST"],
|
||||
"description": "",
|
||||
"async": "false"}}
|
||||
|
||||
baremetal_portgroup_uuid = 'ppp-gggggg-pppp'
|
||||
baremetal_portgroup_name = 'Portgroup-name'
|
||||
baremetal_portgroup_address = 'AA:BB:CC:CC:BB:AA'
|
||||
baremetal_portgroup_mode = 'active-backup'
|
||||
baremetal_portgroup_extra = {'key1': 'value1',
|
||||
'key2': 'value2'}
|
||||
baremetal_portgroup_properties = {'key1': 'value11',
|
||||
'key2': 'value22'}
|
||||
|
||||
PORTGROUP = {'uuid': baremetal_portgroup_uuid,
|
||||
'name': baremetal_portgroup_name,
|
||||
'node_uuid': baremetal_uuid,
|
||||
'address': baremetal_portgroup_address,
|
||||
'extra': baremetal_portgroup_extra,
|
||||
'mode': baremetal_portgroup_mode,
|
||||
'properties': baremetal_portgroup_properties,
|
||||
}
|
||||
|
||||
VIFS = {'vifs': [{'id': 'aaa-aa'}]}
|
||||
|
||||
baremetal_volume_connector_uuid = 'vvv-cccccc-vvvv'
|
||||
baremetal_volume_connector_type = 'iqn'
|
||||
baremetal_volume_connector_connector_id = 'iqn.2017-01.connector'
|
||||
baremetal_volume_connector_extra = {'key1': 'value1',
|
||||
'key2': 'value2'}
|
||||
VOLUME_CONNECTOR = {
|
||||
'uuid': baremetal_volume_connector_uuid,
|
||||
'node_uuid': baremetal_uuid,
|
||||
'type': baremetal_volume_connector_type,
|
||||
'connector_id': baremetal_volume_connector_connector_id,
|
||||
'extra': baremetal_volume_connector_extra,
|
||||
}
|
||||
|
||||
baremetal_volume_target_uuid = 'vvv-tttttt-vvvv'
|
||||
baremetal_volume_target_volume_type = 'iscsi'
|
||||
baremetal_volume_target_boot_index = 0
|
||||
baremetal_volume_target_volume_id = 'vvv-tttttt-iii'
|
||||
baremetal_volume_target_extra = {'key1': 'value1',
|
||||
'key2': 'value2'}
|
||||
baremetal_volume_target_properties = {'key11': 'value11',
|
||||
'key22': 'value22'}
|
||||
VOLUME_TARGET = {
|
||||
'uuid': baremetal_volume_target_uuid,
|
||||
'node_uuid': baremetal_uuid,
|
||||
'volume_type': baremetal_volume_target_volume_type,
|
||||
'boot_index': baremetal_volume_target_boot_index,
|
||||
'volume_id': baremetal_volume_target_volume_id,
|
||||
'extra': baremetal_volume_target_extra,
|
||||
'properties': baremetal_volume_target_properties,
|
||||
}
|
||||
|
||||
|
||||
class TestBaremetal(utils.TestCommand):
|
||||
|
||||
def setUp(self):
|
||||
super(TestBaremetal, self).setUp()
|
||||
|
||||
self.app.client_manager.auth_ref = mock.Mock(auth_token="TOKEN")
|
||||
self.app.client_manager.baremetal = mock.Mock()
|
||||
|
||||
|
||||
class FakeBaremetalResource(fakes.FakeResource):
|
||||
|
||||
def get_keys(self):
|
||||
return {'property': 'value'}
|
@ -1,591 +0,0 @@
|
||||
#
|
||||
# Copyright 2016 Intel Corporation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import copy
|
||||
import mock
|
||||
|
||||
from osc_lib.tests import utils as oscutils
|
||||
|
||||
from ironicclient import exc
|
||||
from ironicclient.osc.v1 import baremetal_chassis
|
||||
from ironicclient.tests.unit.osc.v1 import fakes as baremetal_fakes
|
||||
|
||||
|
||||
class TestChassis(baremetal_fakes.TestBaremetal):
|
||||
|
||||
def setUp(self):
|
||||
super(TestChassis, self).setUp()
|
||||
|
||||
# Get a shortcut to the baremetal manager mock
|
||||
self.baremetal_mock = self.app.client_manager.baremetal
|
||||
self.baremetal_mock.reset_mock()
|
||||
|
||||
|
||||
class TestChassisCreate(TestChassis):
|
||||
def setUp(self):
|
||||
super(TestChassisCreate, self).setUp()
|
||||
|
||||
self.baremetal_mock.chassis.create.return_value = (
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
copy.deepcopy(baremetal_fakes.BAREMETAL_CHASSIS),
|
||||
loaded=True,
|
||||
))
|
||||
|
||||
# Get the command object to test
|
||||
self.cmd = baremetal_chassis.CreateBaremetalChassis(self.app, None)
|
||||
self.arglist = []
|
||||
self.verifylist = []
|
||||
self.collist = (
|
||||
'description',
|
||||
'extra',
|
||||
'uuid',
|
||||
)
|
||||
self.datalist = (
|
||||
baremetal_fakes.baremetal_chassis_description,
|
||||
baremetal_fakes.baremetal_chassis_extra,
|
||||
baremetal_fakes.baremetal_chassis_uuid,
|
||||
)
|
||||
self.actual_kwargs = {}
|
||||
|
||||
def check_with_options(self, addl_arglist, addl_verifylist, addl_kwargs):
|
||||
arglist = copy.copy(self.arglist) + addl_arglist
|
||||
verifylist = copy.copy(self.verifylist) + addl_verifylist
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
# DisplayCommandBase.take_action() returns two tuples
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
collist = copy.copy(self.collist)
|
||||
self.assertEqual(collist, columns)
|
||||
self.assertNotIn('nodes', columns)
|
||||
|
||||
datalist = copy.copy(self.datalist)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
kwargs = copy.copy(self.actual_kwargs)
|
||||
kwargs.update(addl_kwargs)
|
||||
|
||||
self.baremetal_mock.chassis.create.assert_called_once_with(**kwargs)
|
||||
|
||||
def test_chassis_create_no_options(self):
|
||||
self.check_with_options([], [], {})
|
||||
|
||||
def test_chassis_create_with_description(self):
|
||||
description = 'chassis description'
|
||||
self.check_with_options(['--description', description],
|
||||
[('description', description)],
|
||||
{'description': description})
|
||||
|
||||
def test_chassis_create_with_extra(self):
|
||||
extra1 = 'arg1=val1'
|
||||
extra2 = 'arg2=val2'
|
||||
self.check_with_options(['--extra', extra1,
|
||||
'--extra', extra2],
|
||||
[('extra', [extra1, extra2])],
|
||||
{'extra': {
|
||||
'arg1': 'val1',
|
||||
'arg2': 'val2'}})
|
||||
|
||||
def test_chassis_create_with_uuid(self):
|
||||
uuid = baremetal_fakes.baremetal_chassis_uuid
|
||||
self.check_with_options(['--uuid', uuid],
|
||||
[('uuid', uuid)],
|
||||
{'uuid': uuid})
|
||||
|
||||
|
||||
class TestChassisDelete(TestChassis):
|
||||
def setUp(self):
|
||||
super(TestChassisDelete, self).setUp()
|
||||
|
||||
self.baremetal_mock.chassis.get.return_value = (
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
copy.deepcopy(baremetal_fakes.BAREMETAL_CHASSIS),
|
||||
loaded=True,
|
||||
))
|
||||
|
||||
# Get the command object to test
|
||||
self.cmd = baremetal_chassis.DeleteBaremetalChassis(self.app, None)
|
||||
|
||||
def test_chassis_delete(self):
|
||||
uuid = baremetal_fakes.baremetal_chassis_uuid
|
||||
arglist = [uuid]
|
||||
verifylist = []
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.baremetal_mock.chassis.delete.assert_called_with(uuid)
|
||||
|
||||
def test_chassis_delete_multiple(self):
|
||||
uuid1 = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
|
||||
uuid2 = '11111111-2222-3333-4444-555555555555'
|
||||
arglist = [uuid1, uuid2]
|
||||
verifylist = []
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
# Set expected values
|
||||
args = [uuid1, uuid2]
|
||||
self.baremetal_mock.chassis.delete.has_calls(
|
||||
[mock.call(x) for x in args]
|
||||
)
|
||||
self.assertEqual(2, self.baremetal_mock.chassis.delete.call_count)
|
||||
|
||||
def test_chassis_delete_multiple_with_failure(self):
|
||||
uuid1 = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
|
||||
uuid2 = '11111111-2222-3333-4444-555555555555'
|
||||
arglist = [uuid1, uuid2]
|
||||
verifylist = []
|
||||
|
||||
self.baremetal_mock.chassis.delete.side_effect = [
|
||||
'', exc.ClientException]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.assertRaises(exc.ClientException,
|
||||
self.cmd.take_action,
|
||||
parsed_args)
|
||||
|
||||
# Set expected values
|
||||
args = [uuid1, uuid2]
|
||||
self.baremetal_mock.chassis.delete.has_calls(
|
||||
[mock.call(x) for x in args]
|
||||
)
|
||||
self.assertEqual(2, self.baremetal_mock.chassis.delete.call_count)
|
||||
|
||||
|
||||
class TestChassisList(TestChassis):
|
||||
|
||||
def setUp(self):
|
||||
super(TestChassisList, self).setUp()
|
||||
|
||||
self.baremetal_mock.chassis.list.return_value = [
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
copy.deepcopy(baremetal_fakes.BAREMETAL_CHASSIS),
|
||||
loaded=True,
|
||||
),
|
||||
]
|
||||
|
||||
# Get the command object to test
|
||||
self.cmd = baremetal_chassis.ListBaremetalChassis(self.app, None)
|
||||
|
||||
def test_chassis_list_no_options(self):
|
||||
arglist = []
|
||||
verifylist = []
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
# DisplayCommandBase.take_action() returns two tuples
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
# Set expected values
|
||||
kwargs = {
|
||||
'marker': None,
|
||||
'limit': None,
|
||||
}
|
||||
|
||||
self.baremetal_mock.chassis.list.assert_called_with(
|
||||
**kwargs
|
||||
)
|
||||
|
||||
collist = (
|
||||
"UUID",
|
||||
"Description",
|
||||
)
|
||||
self.assertEqual(collist, columns)
|
||||
datalist = ((
|
||||
baremetal_fakes.baremetal_chassis_uuid,
|
||||
baremetal_fakes.baremetal_chassis_description,
|
||||
), )
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_chassis_list_long(self):
|
||||
arglist = [
|
||||
'--long',
|
||||
]
|
||||
verifylist = [
|
||||
('long', True),
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
# DisplayCommandBase.take_action() returns two tuples
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
# Set expected values
|
||||
kwargs = {
|
||||
'detail': True,
|
||||
'marker': None,
|
||||
'limit': None,
|
||||
}
|
||||
|
||||
self.baremetal_mock.chassis.list.assert_called_with(
|
||||
**kwargs
|
||||
)
|
||||
|
||||
collist = ('UUID', 'Description', 'Created At', 'Updated At', 'Extra')
|
||||
self.assertEqual(collist, columns)
|
||||
datalist = ((
|
||||
baremetal_fakes.baremetal_chassis_uuid,
|
||||
baremetal_fakes.baremetal_chassis_description,
|
||||
'',
|
||||
'',
|
||||
baremetal_fakes.baremetal_chassis_extra,
|
||||
), )
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_chassis_list_fields(self):
|
||||
arglist = [
|
||||
'--fields', 'uuid', 'extra',
|
||||
]
|
||||
verifylist = [
|
||||
('fields', [['uuid', 'extra']]),
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
# Set expected values
|
||||
kwargs = {
|
||||
'marker': None,
|
||||
'limit': None,
|
||||
'detail': False,
|
||||
'fields': ('uuid', 'extra'),
|
||||
}
|
||||
|
||||
self.baremetal_mock.chassis.list.assert_called_with(
|
||||
**kwargs
|
||||
)
|
||||
|
||||
def test_chassis_list_fields_multiple(self):
|
||||
arglist = [
|
||||
'--fields', 'uuid', 'description',
|
||||
'--fields', 'extra',
|
||||
]
|
||||
verifylist = [
|
||||
('fields', [['uuid', 'description'], ['extra']])
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
# Set expected values
|
||||
kwargs = {
|
||||
'marker': None,
|
||||
'limit': None,
|
||||
'detail': False,
|
||||
'fields': ('uuid', 'description', 'extra')
|
||||
}
|
||||
|
||||
self.baremetal_mock.chassis.list.assert_called_with(
|
||||
**kwargs
|
||||
)
|
||||
|
||||
def test_chassis_list_invalid_fields(self):
|
||||
arglist = [
|
||||
'--fields', 'uuid', 'invalid'
|
||||
]
|
||||
verifylist = [
|
||||
('fields', [['uuid', 'invalid']])
|
||||
]
|
||||
|
||||
self.assertRaises(oscutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_chassis_list_long_and_fields(self):
|
||||
arglist = [
|
||||
'--long',
|
||||
'--fields', 'uuid', 'invalid'
|
||||
]
|
||||
verifylist = [
|
||||
('long', True),
|
||||
('fields', [['uuid', 'invalid']])
|
||||
]
|
||||
|
||||
self.assertRaises(oscutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
|
||||
class TestChassisSet(TestChassis):
|
||||
def setUp(self):
|
||||
super(TestChassisSet, self).setUp()
|
||||
|
||||
self.baremetal_mock.chassis.update.return_value = (
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
copy.deepcopy(baremetal_fakes.BAREMETAL_CHASSIS),
|
||||
loaded=True,
|
||||
))
|
||||
|
||||
# Get the command object to test
|
||||
self.cmd = baremetal_chassis.SetBaremetalChassis(self.app, None)
|
||||
|
||||
def test_chassis_set_no_options(self):
|
||||
arglist = []
|
||||
verifylist = []
|
||||
|
||||
self.assertRaises(oscutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_chassis_set_no_property(self):
|
||||
uuid = baremetal_fakes.baremetal_chassis_uuid
|
||||
arglist = [uuid]
|
||||
verifylist = [('chassis', uuid)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.assertFalse(self.baremetal_mock.chassis.update.called)
|
||||
|
||||
def test_chassis_set_description(self):
|
||||
description = 'new description'
|
||||
uuid = baremetal_fakes.baremetal_chassis_uuid
|
||||
arglist = [
|
||||
uuid,
|
||||
'--description', 'new description',
|
||||
]
|
||||
verifylist = [
|
||||
('chassis', uuid),
|
||||
('description', description)
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.baremetal_mock.chassis.update.assert_called_once_with(
|
||||
uuid,
|
||||
[{'path': '/description', 'value': description, 'op': 'add'}]
|
||||
)
|
||||
|
||||
def test_chassis_set_extra(self):
|
||||
uuid = baremetal_fakes.baremetal_chassis_uuid
|
||||
extra = 'foo=bar'
|
||||
arglist = [
|
||||
uuid,
|
||||
'--extra', extra,
|
||||
]
|
||||
verifylist = [
|
||||
('chassis', uuid),
|
||||
('extra', [extra])
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.baremetal_mock.chassis.update.assert_called_once_with(
|
||||
uuid,
|
||||
[{'path': '/extra/foo', 'value': 'bar', 'op': 'add'}]
|
||||
)
|
||||
|
||||
|
||||
class TestChassisShow(TestChassis):
|
||||
def setUp(self):
|
||||
super(TestChassisShow, self).setUp()
|
||||
|
||||
self.baremetal_mock.chassis.get.return_value = (
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
copy.deepcopy(baremetal_fakes.BAREMETAL_CHASSIS),
|
||||
loaded=True,
|
||||
))
|
||||
|
||||
# Get the command object to test
|
||||
self.cmd = baremetal_chassis.ShowBaremetalChassis(self.app, None)
|
||||
|
||||
def test_chassis_show(self):
|
||||
arglist = [baremetal_fakes.baremetal_chassis_uuid]
|
||||
verifylist = []
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
# DisplayCommandBase.take_action() returns two tuples
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
# Set expected values
|
||||
args = [baremetal_fakes.baremetal_chassis_uuid]
|
||||
|
||||
self.baremetal_mock.chassis.get.assert_called_with(
|
||||
*args, fields=None
|
||||
)
|
||||
|
||||
collist = (
|
||||
'description',
|
||||
'extra',
|
||||
'uuid'
|
||||
)
|
||||
self.assertEqual(collist, columns)
|
||||
self.assertNotIn('nodes', columns)
|
||||
datalist = (
|
||||
baremetal_fakes.baremetal_chassis_description,
|
||||
baremetal_fakes.baremetal_chassis_extra,
|
||||
baremetal_fakes.baremetal_chassis_uuid,
|
||||
)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_chassis_show_no_chassis(self):
|
||||
arglist = []
|
||||
verifylist = []
|
||||
|
||||
self.assertRaises(oscutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_chassis_show_fields(self):
|
||||
uuid = baremetal_fakes.baremetal_chassis_uuid
|
||||
arglist = [
|
||||
uuid,
|
||||
'--fields', 'uuid', 'description',
|
||||
]
|
||||
verifylist = [
|
||||
('chassis', uuid),
|
||||
('fields', [['uuid', 'description']]),
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
# Set expected values
|
||||
args = [uuid]
|
||||
fields = ['uuid', 'description']
|
||||
|
||||
self.baremetal_mock.chassis.get.assert_called_with(
|
||||
*args, fields=fields
|
||||
)
|
||||
|
||||
def test_chassis_show_fields_multiple(self):
|
||||
uuid = baremetal_fakes.baremetal_chassis_uuid
|
||||
arglist = [
|
||||
uuid,
|
||||
'--fields', 'uuid', 'description',
|
||||
'--fields', 'extra',
|
||||
]
|
||||
verifylist = [
|
||||
('chassis', uuid),
|
||||
('fields', [['uuid', 'description'], ['extra']])
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
# Set expected values
|
||||
args = [uuid]
|
||||
fields = ['uuid', 'description', 'extra']
|
||||
|
||||
self.baremetal_mock.chassis.get.assert_called_with(
|
||||
*args, fields=fields
|
||||
)
|
||||
|
||||
def test_chassis_show_invalid_fields(self):
|
||||
uuid = baremetal_fakes.baremetal_chassis_uuid
|
||||
arglist = [
|
||||
uuid,
|
||||
'--fields', 'uuid', 'invalid'
|
||||
]
|
||||
verifylist = [
|
||||
('chassis', uuid),
|
||||
('fields', [['uuid', 'invalid']])
|
||||
]
|
||||
|
||||
self.assertRaises(oscutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
|
||||
class TestChassisUnset(TestChassis):
|
||||
def setUp(self):
|
||||
super(TestChassisUnset, self).setUp()
|
||||
|
||||
self.baremetal_mock.chassis.update.return_value = (
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
copy.deepcopy(baremetal_fakes.BAREMETAL_CHASSIS),
|
||||
loaded=True,
|
||||
))
|
||||
|
||||
# Get the command object to test
|
||||
self.cmd = baremetal_chassis.UnsetBaremetalChassis(self.app, None)
|
||||
|
||||
def test_chassis_unset_no_options(self):
|
||||
arglist = []
|
||||
verifylist = []
|
||||
|
||||
self.assertRaises(oscutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_chassis_unset_no_property(self):
|
||||
uuid = baremetal_fakes.baremetal_chassis_uuid
|
||||
arglist = [uuid]
|
||||
verifylist = [('chassis', uuid)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.assertFalse(self.baremetal_mock.chassis.update.called)
|
||||
|
||||
def test_chassis_unset_description(self):
|
||||
uuid = baremetal_fakes.baremetal_chassis_uuid
|
||||
arglist = [
|
||||
uuid,
|
||||
'--description',
|
||||
]
|
||||
verifylist = [
|
||||
('chassis', uuid),
|
||||
('description', True)
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.baremetal_mock.chassis.update.assert_called_once_with(
|
||||
uuid,
|
||||
[{'path': '/description', 'op': 'remove'}]
|
||||
)
|
||||
|
||||
def test_chassis_unset_extra(self):
|
||||
uuid = baremetal_fakes.baremetal_chassis_uuid
|
||||
arglist = [
|
||||
uuid,
|
||||
'--extra', 'foo',
|
||||
]
|
||||
verifylist = [
|
||||
('chassis', uuid),
|
||||
('extra', ['foo'])
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.baremetal_mock.chassis.update.assert_called_once_with(
|
||||
uuid,
|
||||
[{'path': '/extra/foo', 'op': 'remove'}]
|
||||
)
|
@ -1,78 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import copy
|
||||
import mock
|
||||
|
||||
from ironicclient import exc
|
||||
from ironicclient.osc.v1 import baremetal_create
|
||||
from ironicclient.tests.unit.osc.v1 import fakes as baremetal_fakes
|
||||
from ironicclient.v1 import create_resources
|
||||
|
||||
|
||||
class TestBaremetalCreate(baremetal_fakes.TestBaremetal):
|
||||
def setUp(self):
|
||||
super(TestBaremetalCreate, self).setUp()
|
||||
self.cmd = baremetal_create.CreateBaremetal(self.app, None)
|
||||
|
||||
def test_baremetal_create_with_driver(self):
|
||||
self.baremetal_mock = self.app.client_manager.baremetal
|
||||
self.baremetal_mock.reset_mock()
|
||||
self.baremetal_mock.node.create.return_value = (
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
copy.deepcopy(baremetal_fakes.BAREMETAL),
|
||||
loaded=True,
|
||||
))
|
||||
|
||||
arglist = ['--driver', 'fake_driver']
|
||||
verifylist = [('driver', 'fake_driver')]
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
# DisplayCommandBase.take_action() returns two tuples
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
self.assertEqual(('chassis_uuid',
|
||||
'instance_uuid',
|
||||
'maintenance',
|
||||
'name',
|
||||
'power_state',
|
||||
'provision_state',
|
||||
'uuid'), columns)
|
||||
self.assertEqual(
|
||||
(baremetal_fakes.baremetal_chassis_uuid_empty,
|
||||
baremetal_fakes.baremetal_instance_uuid,
|
||||
baremetal_fakes.baremetal_maintenance,
|
||||
baremetal_fakes.baremetal_name,
|
||||
baremetal_fakes.baremetal_power_state,
|
||||
baremetal_fakes.baremetal_provision_state,
|
||||
baremetal_fakes.baremetal_uuid), tuple(data))
|
||||
|
||||
self.baremetal_mock.node.create.assert_called_once_with(
|
||||
driver='fake_driver')
|
||||
|
||||
def test_baremetal_create_no_args(self):
|
||||
parsed_args = self.check_parser(self.cmd, [], [])
|
||||
self.assertRaises(exc.ValidationError,
|
||||
self.cmd.take_action, parsed_args)
|
||||
|
||||
@mock.patch.object(create_resources, 'create_resources', autospec=True)
|
||||
def test_baremetal_create_resource_files(self, mock_create):
|
||||
arglist = ['file.yaml', 'file.json']
|
||||
verifylist = [('resource_files', ['file.yaml', 'file.json'])]
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
# DisplayCommandBase.take_action() returns two tuples
|
||||
self.assertEqual((tuple(), tuple()), self.cmd.take_action(parsed_args))
|
||||
mock_create.assert_called_once_with(self.app.client_manager.baremetal,
|
||||
['file.yaml', 'file.json'])
|
@ -1,406 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import copy
|
||||
|
||||
from osc_lib.tests import utils as oscutils
|
||||
|
||||
from ironicclient.osc.v1 import baremetal_driver
|
||||
from ironicclient.tests.unit.osc.v1 import fakes as baremetal_fakes
|
||||
|
||||
|
||||
class TestBaremetalDriver(baremetal_fakes.TestBaremetal):
|
||||
|
||||
def setUp(self):
|
||||
super(TestBaremetalDriver, self).setUp()
|
||||
|
||||
self.baremetal_mock = self.app.client_manager.baremetal
|
||||
self.baremetal_mock.reset_mock()
|
||||
|
||||
|
||||
class TestListBaremetalDriver(TestBaremetalDriver):
|
||||
|
||||
def setUp(self):
|
||||
super(TestListBaremetalDriver, self).setUp()
|
||||
|
||||
self.baremetal_mock.driver.list.return_value = [
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
copy.deepcopy(baremetal_fakes.BAREMETAL_DRIVER),
|
||||
loaded=True)
|
||||
]
|
||||
self.cmd = baremetal_driver.ListBaremetalDriver(self.app, None)
|
||||
|
||||
def test_baremetal_driver_list(self):
|
||||
arglist = []
|
||||
verifylist = []
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
collist = (
|
||||
"Supported driver(s)",
|
||||
"Active host(s)")
|
||||
self.assertEqual(collist, tuple(columns))
|
||||
|
||||
datalist = ((
|
||||
baremetal_fakes.baremetal_driver_name,
|
||||
', '.join(baremetal_fakes.baremetal_driver_hosts)), )
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_baremetal_driver_list_with_type(self):
|
||||
arglist = ['--type', baremetal_fakes.baremetal_driver_type]
|
||||
verifylist = [('type', baremetal_fakes.baremetal_driver_type)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
collist = (
|
||||
"Supported driver(s)",
|
||||
"Active host(s)")
|
||||
self.assertEqual(collist, tuple(columns))
|
||||
|
||||
datalist = ((
|
||||
baremetal_fakes.baremetal_driver_name,
|
||||
', '.join(baremetal_fakes.baremetal_driver_hosts)),)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_baremetal_driver_list_with_detail(self):
|
||||
arglist = ['--long']
|
||||
verifylist = [('long', True)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
collist = (
|
||||
"Supported driver(s)",
|
||||
"Type",
|
||||
"Active host(s)",
|
||||
'Default Boot Interface',
|
||||
'Default Console Interface',
|
||||
'Default Deploy Interface',
|
||||
'Default Inspect Interface',
|
||||
'Default Management Interface',
|
||||
'Default Network Interface',
|
||||
'Default Power Interface',
|
||||
'Default RAID Interface',
|
||||
'Default Storage Interface',
|
||||
'Default Vendor Interface',
|
||||
'Enabled Boot Interfaces',
|
||||
'Enabled Console Interfaces',
|
||||
'Enabled Deploy Interfaces',
|
||||
'Enabled Inspect Interfaces',
|
||||
'Enabled Management Interfaces',
|
||||
'Enabled Network Interfaces',
|
||||
'Enabled Power Interfaces',
|
||||
'Enabled RAID Interfaces',
|
||||
'Enabled Storage Interfaces',
|
||||
'Enabled Vendor Interfaces'
|
||||
)
|
||||
self.assertEqual(collist, tuple(columns))
|
||||
|
||||
datalist = ((
|
||||
baremetal_fakes.baremetal_driver_name,
|
||||
baremetal_fakes.baremetal_driver_type,
|
||||
', '.join(baremetal_fakes.baremetal_driver_hosts),
|
||||
baremetal_fakes.baremetal_driver_default_boot_if,
|
||||
baremetal_fakes.baremetal_driver_default_console_if,
|
||||
baremetal_fakes.baremetal_driver_default_deploy_if,
|
||||
baremetal_fakes.baremetal_driver_default_inspect_if,
|
||||
baremetal_fakes.baremetal_driver_default_management_if,
|
||||
baremetal_fakes.baremetal_driver_default_network_if,
|
||||
baremetal_fakes.baremetal_driver_default_power_if,
|
||||
baremetal_fakes.baremetal_driver_default_raid_if,
|
||||
baremetal_fakes.baremetal_driver_default_storage_if,
|
||||
baremetal_fakes.baremetal_driver_default_vendor_if,
|
||||
', '.join(baremetal_fakes.baremetal_driver_enabled_boot_ifs),
|
||||
', '.join(baremetal_fakes.baremetal_driver_enabled_console_ifs),
|
||||
', '.join(baremetal_fakes.baremetal_driver_enabled_deploy_ifs),
|
||||
', '.join(baremetal_fakes.baremetal_driver_enabled_inspect_ifs),
|
||||
', '.join(baremetal_fakes.baremetal_driver_enabled_management_ifs),
|
||||
', '.join(baremetal_fakes.baremetal_driver_enabled_network_ifs),
|
||||
', '.join(baremetal_fakes.baremetal_driver_enabled_power_ifs),
|
||||
', '.join(baremetal_fakes.baremetal_driver_enabled_raid_ifs),
|
||||
', '.join(baremetal_fakes.baremetal_driver_enabled_storage_ifs),
|
||||
', '.join(baremetal_fakes.baremetal_driver_enabled_vendor_ifs),
|
||||
),)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
|
||||
class TestListBaremetalDriverProperty(TestBaremetalDriver):
    """Tests for the ``baremetal driver property list`` command."""

    def setUp(self):
        super(TestListBaremetalDriverProperty, self).setUp()

        # Stub the driver-properties API with a two-entry mapping.
        self.baremetal_mock.driver.properties.return_value = {
            'property1': 'description1', 'property2': 'description2'}
        self.cmd = baremetal_driver.ListBaremetalDriverProperty(
            self.app, None)

    def test_baremetal_driver_property_list(self):
        """Properties are rendered as (property, description) rows."""
        arglist = ['fakedrivername']
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.baremetal_mock.driver.properties.assert_called_with(*arglist)

        expected_columns = ['Property', 'Description']
        self.assertEqual(expected_columns, columns)
        expected_data = [('property1', 'description1'),
                         ('property2', 'description2')]
        self.assertEqual(expected_data, data)

    def test_baremetal_driver_list_no_arg(self):
        """Omitting the required driver name is a parser error."""
        self.assertRaises(oscutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])
|
||||
|
||||
|
||||
class TestListBaremetalDriverRaidProperty(TestBaremetalDriver):
    """Tests for the ``baremetal driver raid property list`` command."""

    def setUp(self):
        super(TestListBaremetalDriverRaidProperty, self).setUp()

        # Stub the RAID logical-disk properties API.
        raid_props_mock = self.baremetal_mock.driver.raid_logical_disk_properties
        raid_props_mock.return_value = {
            'RAIDProperty1': 'driver_raid_property1',
            'RAIDProperty2': 'driver_raid_property2',
        }

        self.cmd = baremetal_driver.ListBaremetalDriverRaidProperty(
            self.app, None)

    def test_baremetal_driver_raid_property_list(self):
        """RAID properties are rendered as (Property, Description) rows."""
        arglist = ['fakedrivername']
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        (self.baremetal_mock.driver.
            raid_logical_disk_properties.assert_called_with(*arglist))

        expected_columns = ('Property', 'Description')
        self.assertEqual(expected_columns, tuple(columns))

        expected_data = [('RAIDProperty1', 'driver_raid_property1'),
                         ('RAIDProperty2', 'driver_raid_property2')]
        self.assertEqual(expected_data, data)

    def test_baremetal_driver_raid_property_list_no_arg(self):
        """Omitting the required driver name is a parser error."""
        self.assertRaises(oscutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])
|
||||
|
||||
|
||||
class TestPassthruCallBaremetalDriver(TestBaremetalDriver):
    """Tests for the ``baremetal driver passthru call`` command."""

    def setUp(self):
        super(TestPassthruCallBaremetalDriver, self).setUp()

        # Stub the vendor-passthru endpoint with canned fake output.
        self.baremetal_mock.driver.vendor_passthru.return_value = (
            baremetal_fakes.BAREMETAL_DRIVER_PASSTHRU
        )

        self.cmd = baremetal_driver.PassthruCallBaremetalDriver(self.app, None)

    def test_baremetal_driver_passthru_call_with_min_args(self):
        """With only driver and method given, defaults fill the rest."""
        arglist = [
            baremetal_fakes.baremetal_driver_name,
            baremetal_fakes.baremetal_driver_passthru_method,
        ]
        verifylist = [
            ('driver', baremetal_fakes.baremetal_driver_name),
            ('method', baremetal_fakes.baremetal_driver_passthru_method),
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        # Defaults are POST with an empty argument dict.
        expected_args = [
            baremetal_fakes.baremetal_driver_name,
            baremetal_fakes.baremetal_driver_passthru_method,
        ]
        expected_kwargs = {
            'http_method': 'POST',
            'args': {}
        }
        (self.baremetal_mock.driver.vendor_passthru.
            assert_called_once_with(*expected_args, **expected_kwargs))

    def test_baremetal_driver_passthru_call_with_all_args(self):
        """--arg pairs are folded into a dict; --http-method is honoured."""
        arglist = [
            baremetal_fakes.baremetal_driver_name,
            baremetal_fakes.baremetal_driver_passthru_method,
            '--arg', 'arg1=val1', '--arg', 'arg2=val2',
            '--http-method', 'POST'
        ]
        verifylist = [
            ('driver', baremetal_fakes.baremetal_driver_name),
            ('method', baremetal_fakes.baremetal_driver_passthru_method),
            ('arg', ['arg1=val1', 'arg2=val2']),
            ('http_method', 'POST')
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        expected_args = [
            baremetal_fakes.baremetal_driver_name,
            baremetal_fakes.baremetal_driver_passthru_method,
        ]
        expected_kwargs = {
            'http_method': 'POST',
            'args': {'arg1': 'val1', 'arg2': 'val2'}
        }
        (self.baremetal_mock.driver.vendor_passthru.
            assert_called_once_with(*expected_args, **expected_kwargs))

    def test_baremetal_driver_passthru_call_no_arg(self):
        """Both positional arguments are required."""
        self.assertRaises(oscutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])
|
||||
|
||||
|
||||
class TestPassthruListBaremetalDriver(TestBaremetalDriver):
    """Tests for the ``baremetal driver passthru list`` command."""

    def setUp(self):
        super(TestPassthruListBaremetalDriver, self).setUp()

        self.baremetal_mock.driver.get_vendor_passthru_methods.return_value = (
            baremetal_fakes.BAREMETAL_DRIVER_PASSTHRU
        )
        self.cmd = baremetal_driver.PassthruListBaremetalDriver(self.app, None)

    def test_baremetal_driver_passthru_list(self):
        """Passthru methods are listed with their HTTP metadata."""
        arglist = ['fakedrivername']
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        (self.baremetal_mock.driver.get_vendor_passthru_methods.
            assert_called_with('fakedrivername'))

        expected_columns = (
            "Name",
            "Supported HTTP methods",
            "Async",
            "Description",
            "Response is attachment",
        )
        self.assertEqual(expected_columns, tuple(columns))

        expected_rows = (('lookup', 'POST', 'false', '', 'false'),)
        self.assertEqual(expected_rows, tuple(data))

    def test_baremetal_driver_passthru_list_no_arg(self):
        """Omitting the required driver name is a parser error."""
        self.assertRaises(oscutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])
|
||||
|
||||
|
||||
class TestShowBaremetalDriver(TestBaremetalDriver):
    """Tests for the ``baremetal driver show`` command."""

    def setUp(self):
        super(TestShowBaremetalDriver, self).setUp()

        self.baremetal_mock.driver.get.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.BAREMETAL_DRIVER),
                loaded=True))
        self.cmd = baremetal_driver.ShowBaremetalDriver(self.app, None)

    def test_baremetal_driver_show(self):
        """Showing a driver fetches it once and never loads properties."""
        arglist = ['fakedrivername']
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        self.baremetal_mock.driver.get.assert_called_with('fakedrivername')
        # The show command must not trigger a properties lookup.
        self.assertFalse(self.baremetal_mock.driver.properties.called)

        expected_columns = (
            'default_boot_interface', 'default_console_interface',
            'default_deploy_interface', 'default_inspect_interface',
            'default_management_interface', 'default_network_interface',
            'default_power_interface', 'default_raid_interface',
            'default_storage_interface', 'default_vendor_interface',
            'enabled_boot_interfaces', 'enabled_console_interfaces',
            'enabled_deploy_interfaces', 'enabled_inspect_interfaces',
            'enabled_management_interfaces',
            'enabled_network_interfaces', 'enabled_power_interfaces',
            'enabled_raid_interfaces', 'enabled_storage_interfaces',
            'enabled_vendor_interfaces', 'hosts', 'name', 'type')
        self.assertEqual(expected_columns, columns)

        expected_data = (
            baremetal_fakes.baremetal_driver_default_boot_if,
            baremetal_fakes.baremetal_driver_default_console_if,
            baremetal_fakes.baremetal_driver_default_deploy_if,
            baremetal_fakes.baremetal_driver_default_inspect_if,
            baremetal_fakes.baremetal_driver_default_management_if,
            baremetal_fakes.baremetal_driver_default_network_if,
            baremetal_fakes.baremetal_driver_default_power_if,
            baremetal_fakes.baremetal_driver_default_raid_if,
            baremetal_fakes.baremetal_driver_default_storage_if,
            baremetal_fakes.baremetal_driver_default_vendor_if,
            ', '.join(baremetal_fakes.baremetal_driver_enabled_boot_ifs),
            ', '.join(baremetal_fakes.baremetal_driver_enabled_console_ifs),
            ', '.join(baremetal_fakes.baremetal_driver_enabled_deploy_ifs),
            ', '.join(baremetal_fakes.baremetal_driver_enabled_inspect_ifs),
            ', '.join(baremetal_fakes.baremetal_driver_enabled_management_ifs),
            ', '.join(baremetal_fakes.baremetal_driver_enabled_network_ifs),
            ', '.join(baremetal_fakes.baremetal_driver_enabled_power_ifs),
            ', '.join(baremetal_fakes.baremetal_driver_enabled_raid_ifs),
            ', '.join(baremetal_fakes.baremetal_driver_enabled_storage_ifs),
            ', '.join(baremetal_fakes.baremetal_driver_enabled_vendor_ifs),
            ', '.join(baremetal_fakes.baremetal_driver_hosts),
            baremetal_fakes.baremetal_driver_name,
            baremetal_fakes.baremetal_driver_type)

        self.assertEqual(expected_data, tuple(data))

    def test_baremetal_driver_show_no_arg(self):
        """Omitting the required driver name is a parser error."""
        self.assertRaises(oscutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])
|
File diff suppressed because it is too large
Load Diff
@ -1,760 +0,0 @@
|
||||
#
|
||||
# Copyright 2015 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import copy
|
||||
|
||||
import mock
|
||||
from osc_lib.tests import utils as osctestutils
|
||||
from osc_lib import utils as oscutils
|
||||
|
||||
from ironicclient import exc
|
||||
from ironicclient.osc.v1 import baremetal_port
|
||||
from ironicclient.tests.unit.osc.v1 import fakes as baremetal_fakes
|
||||
|
||||
|
||||
class TestBaremetalPort(baremetal_fakes.TestBaremetal):
    """Base fixture for baremetal port command tests.

    Exposes the mocked baremetal client as ``self.baremetal_mock`` and
    resets it so every test starts with a clean call history.
    """

    def setUp(self):
        super(TestBaremetalPort, self).setUp()

        self.baremetal_mock = self.app.client_manager.baremetal
        self.baremetal_mock.reset_mock()
|
||||
|
||||
|
||||
class TestCreateBaremetalPort(TestBaremetalPort):
    """Tests for the ``baremetal port create`` command."""

    def setUp(self):
        super(TestCreateBaremetalPort, self).setUp()

        self.baremetal_mock.port.create.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.BAREMETAL_PORT),
                loaded=True,
            ))

        # Command object under test.
        self.cmd = baremetal_port.CreateBaremetalPort(self.app, None)

    def test_baremetal_port_create(self):
        """Address plus --node are forwarded verbatim to port.create()."""
        arglist = [
            baremetal_fakes.baremetal_port_address,
            '--node', baremetal_fakes.baremetal_uuid,
        ]
        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
            ('address', baremetal_fakes.baremetal_port_address),
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        expected = {
            'address': baremetal_fakes.baremetal_port_address,
            'node_uuid': baremetal_fakes.baremetal_uuid,
        }
        self.baremetal_mock.port.create.assert_called_once_with(**expected)

    def test_baremetal_port_create_extras(self):
        """Repeated --extra pairs are collected into the extra dict."""
        arglist = [
            baremetal_fakes.baremetal_port_address,
            '--node', baremetal_fakes.baremetal_uuid,
            '--extra', 'key1=value1',
            '--extra', 'key2=value2'
        ]
        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
            ('address', baremetal_fakes.baremetal_port_address),
            ('extra', ['key1=value1', 'key2=value2'])
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        expected = {
            'address': baremetal_fakes.baremetal_port_address,
            'node_uuid': baremetal_fakes.baremetal_uuid,
            'extra': baremetal_fakes.baremetal_port_extra
        }
        self.baremetal_mock.port.create.assert_called_once_with(**expected)

    def test_baremetal_port_create_no_address(self):
        """The address positional argument is required."""
        arglist = ['--node', baremetal_fakes.baremetal_uuid]
        verifylist = [('node_uuid', baremetal_fakes.baremetal_uuid)]
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_baremetal_port_create_no_node(self):
        """--node is required."""
        arglist = [baremetal_fakes.baremetal_port_address]
        verifylist = [
            ('address', baremetal_fakes.baremetal_port_address)
        ]
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_baremetal_port_create_no_args(self):
        """An empty command line is rejected by the parser."""
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])

    def test_baremetal_port_create_uuid(self):
        """--uuid is passed through to port.create()."""
        port_uuid = "da6c8d2e-fbcd-457a-b2a7-cc5c775933af"
        arglist = [
            baremetal_fakes.baremetal_port_address,
            '--node', baremetal_fakes.baremetal_uuid,
            '--uuid', port_uuid
        ]
        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
            ('address', baremetal_fakes.baremetal_port_address),
            ('uuid', port_uuid)
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        expected = {
            'address': baremetal_fakes.baremetal_port_address,
            'node_uuid': baremetal_fakes.baremetal_uuid,
            'uuid': port_uuid
        }
        self.baremetal_mock.port.create.assert_called_once_with(**expected)

    def _test_baremetal_port_create_llc_warning(self, additional_args,
                                                additional_verify_items):
        """Create with deprecated ``-l`` options and expect a warning.

        Regardless of which option spelling is used, the merged
        local_link_connection dict must reach port.create() and a
        deprecation warning must be logged.
        """
        arglist = [
            baremetal_fakes.baremetal_port_address,
            '--node', baremetal_fakes.baremetal_uuid,
        ]
        arglist.extend(additional_args)

        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
            ('address', baremetal_fakes.baremetal_port_address),
        ]
        verifylist.extend(additional_verify_items)

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.log = mock.Mock()

        self.cmd.take_action(parsed_args)

        expected = {
            'address': baremetal_fakes.baremetal_port_address,
            'node_uuid': baremetal_fakes.baremetal_uuid,
            'local_link_connection': {'switch_id': 'aa:bb:cc:dd:ee:ff',
                                      'port_id': 'eth0'}
        }
        self.baremetal_mock.port.create.assert_called_once_with(**expected)
        self.cmd.log.warning.assert_called()

    def test_baremetal_port_create_llc_warning_some_deprecated(self):
        """Mixing -l with --local-link-connection still warns."""
        self._test_baremetal_port_create_llc_warning(
            additional_args=['-l', 'port_id=eth0', '--local-link-connection',
                             'switch_id=aa:bb:cc:dd:ee:ff'],
            additional_verify_items=[
                ('local_link_connection_deprecated', ['port_id=eth0']),
                ('local_link_connection', ['switch_id=aa:bb:cc:dd:ee:ff'])]
        )

    def test_baremetal_port_create_llc_warning_all_deprecated(self):
        """Using only the deprecated -l spelling warns."""
        self._test_baremetal_port_create_llc_warning(
            additional_args=['-l', 'port_id=eth0', '-l',
                             'switch_id=aa:bb:cc:dd:ee:ff'],
            additional_verify_items=[('local_link_connection_deprecated',
                                      ['port_id=eth0',
                                       'switch_id=aa:bb:cc:dd:ee:ff'])]
        )

    def test_baremetal_port_create_portgroup_uuid(self):
        """--port-group is passed through to port.create()."""
        arglist = [
            baremetal_fakes.baremetal_port_address,
            '--node', baremetal_fakes.baremetal_uuid,
            '--port-group', baremetal_fakes.baremetal_portgroup_uuid,
        ]
        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
            ('address', baremetal_fakes.baremetal_port_address),
            ('portgroup_uuid', baremetal_fakes.baremetal_portgroup_uuid)
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        expected = {
            'address': baremetal_fakes.baremetal_port_address,
            'node_uuid': baremetal_fakes.baremetal_uuid,
            'portgroup_uuid': baremetal_fakes.baremetal_portgroup_uuid
        }
        self.baremetal_mock.port.create.assert_called_once_with(**expected)

    def test_baremetal_port_create_physical_network(self):
        """--physical-network is passed through to port.create()."""
        arglist = [
            baremetal_fakes.baremetal_port_address,
            '--node', baremetal_fakes.baremetal_uuid,
            '--physical-network',
            baremetal_fakes.baremetal_port_physical_network,
        ]
        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
            ('address', baremetal_fakes.baremetal_port_address),
            ('physical_network',
             baremetal_fakes.baremetal_port_physical_network)
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        expected = {
            'address': baremetal_fakes.baremetal_port_address,
            'node_uuid': baremetal_fakes.baremetal_uuid,
            'physical_network': baremetal_fakes.baremetal_port_physical_network
        }
        self.baremetal_mock.port.create.assert_called_once_with(**expected)
|
||||
|
||||
|
||||
class TestShowBaremetalPort(TestBaremetalPort):
    """Tests for the ``baremetal port show`` command."""

    def setUp(self):
        super(TestShowBaremetalPort, self).setUp()

        self.baremetal_mock.port.get.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.BAREMETAL_PORT),
                loaded=True))

        self.baremetal_mock.port.get_by_address.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.BAREMETAL_PORT),
                loaded=True))

        self.cmd = baremetal_port.ShowBaremetalPort(self.app, None)

    def test_baremetal_port_show(self):
        """Showing by UUID fetches the port and renders its fields."""
        arglist = ['zzz-zzzzzz-zzzz']
        verifylist = [('port', baremetal_fakes.baremetal_port_uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        args = ['zzz-zzzzzz-zzzz']
        self.baremetal_mock.port.get.assert_called_with(*args, fields=None)

        collist = (
            'address',
            'extra',
            'node_uuid',
            'uuid')
        self.assertEqual(collist, columns)

        datalist = (
            baremetal_fakes.baremetal_port_address,
            baremetal_fakes.baremetal_port_extra,
            baremetal_fakes.baremetal_uuid,
            baremetal_fakes.baremetal_port_uuid)
        self.assertEqual(datalist, tuple(data))

    def test_baremetal_port_show_address(self):
        """--address looks the port up via get_by_address()."""
        arglist = ['--address', baremetal_fakes.baremetal_port_address]
        verifylist = [('address', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        # Fix: this used to be a *set* literal ({'AA:BB:CC:DD:EE:FF'})
        # unpacked as positional args -- correct only by accident for a
        # single element, and it duplicated the shared fake constant.
        args = [baremetal_fakes.baremetal_port_address]
        self.baremetal_mock.port.get_by_address.assert_called_with(
            *args, fields=None)

    def test_baremetal_port_show_no_port(self):
        """With neither a UUID nor --address the parser rejects the call."""
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])
|
||||
|
||||
|
||||
class TestBaremetalPortUnset(TestBaremetalPort):
    """Tests for the ``baremetal port unset`` command."""

    def setUp(self):
        super(TestBaremetalPortUnset, self).setUp()

        self.baremetal_mock.port.update.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.BAREMETAL_PORT),
                loaded=True))

        self.cmd = baremetal_port.UnsetBaremetalPort(self.app, None)

    def test_baremetal_port_unset_no_options(self):
        """The port positional argument is required."""
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])

    def test_baremetal_port_unset_no_property(self):
        """With nothing to unset, no update call is made."""
        arglist = [baremetal_fakes.baremetal_port_uuid]
        verifylist = [('port', baremetal_fakes.baremetal_port_uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        self.assertFalse(self.baremetal_mock.port.update.called)

    def test_baremetal_port_unset_extra(self):
        """--extra KEY becomes a JSON-patch remove of /extra/KEY."""
        arglist = ['port', '--extra', 'foo']
        verifylist = [('port', 'port'),
                      ('extra', ['foo'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            'port',
            [{'path': '/extra/foo', 'op': 'remove'}])

    def test_baremetal_port_unset_multiple_extras(self):
        """Each --extra yields its own remove operation, in order."""
        arglist = ['port',
                   '--extra', 'foo',
                   '--extra', 'bar']
        verifylist = [('port', 'port'),
                      ('extra', ['foo', 'bar'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            'port',
            [{'path': '/extra/foo', 'op': 'remove'},
             {'path': '/extra/bar', 'op': 'remove'}])

    def test_baremetal_port_unset_portgroup_uuid(self):
        """--port-group removes the portgroup_uuid field."""
        arglist = ['port', '--port-group']
        verifylist = [('port', 'port'),
                      ('portgroup', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            'port',
            [{'path': '/portgroup_uuid', 'op': 'remove'}])

    def test_baremetal_port_unset_physical_network(self):
        """--physical-network removes the physical_network field."""
        arglist = ['port', '--physical-network']
        verifylist = [('port', 'port'),
                      ('physical_network', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            'port',
            [{'path': '/physical_network', 'op': 'remove'}])
|
||||
|
||||
|
||||
class TestBaremetalPortSet(TestBaremetalPort):
    """Tests for the ``baremetal port set`` command."""

    def setUp(self):
        super(TestBaremetalPortSet, self).setUp()

        self.baremetal_mock.port.update.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.BAREMETAL_PORT),
                loaded=True))

        self.cmd = baremetal_port.SetBaremetalPort(self.app, None)

    def test_baremetal_port_set_node_uuid(self):
        """--node patches /node_uuid with an 'add' operation."""
        new_node_uuid = '1111-111111-1111'
        arglist = [
            baremetal_fakes.baremetal_port_uuid,
            '--node', new_node_uuid]
        verifylist = [
            ('port', baremetal_fakes.baremetal_port_uuid),
            ('node_uuid', new_node_uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            baremetal_fakes.baremetal_port_uuid,
            [{'path': '/node_uuid', 'value': new_node_uuid, 'op': 'add'}])

    def test_baremetal_port_set_address(self):
        """--address patches /address with an 'add' operation."""
        arglist = [
            baremetal_fakes.baremetal_port_uuid,
            '--address', baremetal_fakes.baremetal_port_address]
        verifylist = [
            ('port', baremetal_fakes.baremetal_port_uuid),
            ('address', baremetal_fakes.baremetal_port_address)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            baremetal_fakes.baremetal_port_uuid,
            [{'path': '/address',
              'value': baremetal_fakes.baremetal_port_address,
              'op': 'add'}])

    def test_baremetal_set_extra(self):
        """--extra key=value patches /extra/key."""
        arglist = ['port', '--extra', 'foo=bar']
        verifylist = [('port', 'port'),
                      ('extra', ['foo=bar'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            'port',
            [{'path': '/extra/foo', 'value': 'bar', 'op': 'add'}])

    def test_baremetal_port_set_portgroup_uuid(self):
        """--port-group patches /portgroup_uuid."""
        new_portgroup_uuid = '1111-111111-1111'
        arglist = [
            baremetal_fakes.baremetal_port_uuid,
            '--port-group', new_portgroup_uuid]
        verifylist = [
            ('port', baremetal_fakes.baremetal_port_uuid),
            ('portgroup_uuid', new_portgroup_uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            baremetal_fakes.baremetal_port_uuid,
            [{'path': '/portgroup_uuid', 'value': new_portgroup_uuid,
              'op': 'add'}])

    def test_baremetal_set_local_link_connection(self):
        """--local-link-connection key=value patches the nested field."""
        arglist = [
            baremetal_fakes.baremetal_port_uuid,
            '--local-link-connection', 'switch_info=bar']
        verifylist = [('port', baremetal_fakes.baremetal_port_uuid),
                      ('local_link_connection', ['switch_info=bar'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            baremetal_fakes.baremetal_port_uuid,
            [{'path': '/local_link_connection/switch_info', 'value': 'bar',
              'op': 'add'}])

    def test_baremetal_port_set_pxe_enabled(self):
        """--pxe-enabled sets /pxe_enabled to the string 'True'."""
        arglist = [
            baremetal_fakes.baremetal_port_uuid,
            '--pxe-enabled']
        verifylist = [
            ('port', baremetal_fakes.baremetal_port_uuid),
            ('pxe_enabled', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            baremetal_fakes.baremetal_port_uuid,
            [{'path': '/pxe_enabled', 'value': 'True', 'op': 'add'}])

    def test_baremetal_port_set_pxe_disabled(self):
        """--pxe-disabled sets /pxe_enabled to the string 'False'."""
        arglist = [
            baremetal_fakes.baremetal_port_uuid,
            '--pxe-disabled']
        verifylist = [
            ('port', baremetal_fakes.baremetal_port_uuid),
            ('pxe_enabled', False)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            baremetal_fakes.baremetal_port_uuid,
            [{'path': '/pxe_enabled', 'value': 'False', 'op': 'add'}])

    def test_baremetal_port_set_physical_network(self):
        """--physical-network patches /physical_network."""
        new_physical_network = 'physnet2'
        arglist = [
            baremetal_fakes.baremetal_port_uuid,
            '--physical-network', new_physical_network]
        verifylist = [
            ('port', baremetal_fakes.baremetal_port_uuid),
            ('physical_network', new_physical_network)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.port.update.assert_called_once_with(
            baremetal_fakes.baremetal_port_uuid,
            [{'path': '/physical_network', 'value': new_physical_network,
              'op': 'add'}])

    def test_baremetal_port_set_no_options(self):
        """The port positional argument is required."""
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])

    def test_baremetal_port_set_no_property(self):
        """With nothing to set, no update call is made."""
        arglist = [baremetal_fakes.baremetal_port_uuid]
        verifylist = [('port', baremetal_fakes.baremetal_port_uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        self.assertFalse(self.baremetal_mock.port.update.called)
|
||||
|
||||
|
||||
class TestBaremetalPortDelete(TestBaremetalPort):
    """Tests for the ``baremetal port delete`` command."""

    def setUp(self):
        super(TestBaremetalPortDelete, self).setUp()

        self.baremetal_mock.port.get.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.BAREMETAL_PORT),
                loaded=True))

        self.cmd = baremetal_port.DeleteBaremetalPort(self.app, None)

    def test_baremetal_port_delete(self):
        """A single port identifier is passed straight to port.delete()."""
        arglist = ['zzz-zzzzzz-zzzz']
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        args = 'zzz-zzzzzz-zzzz'
        self.baremetal_mock.port.delete.assert_called_with(args)

    def test_baremetal_port_delete_multiple(self):
        """Every named port is deleted, one call each."""
        arglist = ['zzz-zzzzzz-zzzz', 'fakename']
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        args = ['zzz-zzzzzz-zzzz', 'fakename']
        # Fix: Mock has no ``has_calls`` method -- calling it merely
        # auto-created a child mock and asserted nothing.  Use the real
        # ``assert_has_calls`` so the expected calls are checked.
        self.baremetal_mock.port.delete.assert_has_calls(
            [mock.call(x) for x in args])
        self.assertEqual(2, self.baremetal_mock.port.delete.call_count)

    def test_baremetal_port_delete_multiple_with_fail(self):
        """A failure on one port propagates, but both deletes are tried."""
        arglist = ['zzz-zzzzzz-zzzz', 'badname']
        verifylist = []

        self.baremetal_mock.port.delete.side_effect = ['', exc.ClientException]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.assertRaises(exc.ClientException,
                          self.cmd.take_action,
                          parsed_args)

        args = ['zzz-zzzzzz-zzzz', 'badname']
        # Fix: same ``has_calls`` -> ``assert_has_calls`` correction as above.
        self.baremetal_mock.port.delete.assert_has_calls(
            [mock.call(x) for x in args])
        self.assertEqual(2, self.baremetal_mock.port.delete.call_count)

    def test_baremetal_port_delete_no_port(self):
        """At least one port argument is required."""
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])
|
||||
|
||||
|
||||
class TestBaremetalPortList(TestBaremetalPort):
    """Tests for the ``baremetal port list`` command."""

    def setUp(self):
        super(TestBaremetalPortList, self).setUp()

        # port.list() returns one fake port resource for every test.
        self.baremetal_mock.port.list.return_value = [
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.BAREMETAL_PORT),
                loaded=True)
        ]

        self.cmd = baremetal_port.ListBaremetalPort(self.app, None)

    def test_baremetal_port_list(self):
        """Default listing uses only pagination kwargs and short columns."""
        arglist = []
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        kwargs = {
            'marker': None,
            'limit': None}
        self.baremetal_mock.port.list.assert_called_with(**kwargs)

        collist = (
            "UUID",
            "Address")
        self.assertEqual(collist, columns)

        datalist = ((
            baremetal_fakes.baremetal_port_uuid,
            baremetal_fakes.baremetal_port_address
        ), )
        self.assertEqual(datalist, tuple(data))

    def test_baremetal_port_list_address(self):
        """--address is forwarded to port.list() as a filter."""
        arglist = ['--address', baremetal_fakes.baremetal_port_address]
        verifylist = [('address', baremetal_fakes.baremetal_port_address)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        kwargs = {
            'address': baremetal_fakes.baremetal_port_address,
            'marker': None,
            'limit': None,
        }
        self.baremetal_mock.port.list.assert_called_with(**kwargs)

    def test_baremetal_port_list_node(self):
        """--node is forwarded to port.list() as a filter."""
        arglist = ['--node', baremetal_fakes.baremetal_uuid]
        verifylist = [('node', baremetal_fakes.baremetal_uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        kwargs = {
            'node': baremetal_fakes.baremetal_uuid,
            'marker': None,
            'limit': None,
        }
        self.baremetal_mock.port.list.assert_called_with(**kwargs)

    def test_baremetal_port_list_portgroup(self):
        """--port-group maps to the 'portgroup' filter kwarg."""
        arglist = ['--port-group', baremetal_fakes.baremetal_portgroup_uuid]
        verifylist = [('portgroup', baremetal_fakes.baremetal_portgroup_uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # DisplayCommandBase.take_action() returns two tuples
        self.cmd.take_action(parsed_args)

        kwargs = {
            'portgroup': baremetal_fakes.baremetal_portgroup_uuid,
            'marker': None,
            'limit': None,
        }
        self.baremetal_mock.port.list.assert_called_with(**kwargs)

    def test_baremetal_port_list_long(self):
        """--long requests a detailed listing with the full column set."""
        arglist = ['--long']
        verifylist = [('detail', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        kwargs = {
            'detail': True,
            'marker': None,
            'limit': None,
        }
        self.baremetal_mock.port.list.assert_called_with(**kwargs)

        collist = ('UUID', 'Address', 'Created At', 'Extra', 'Node UUID',
                   'Local Link Connection', 'Portgroup UUID',
                   'PXE boot enabled', 'Physical Network', 'Updated At',
                   'Internal Info')
        self.assertEqual(collist, columns)

        # Fields absent from the fake resource render as empty strings.
        datalist = ((
            baremetal_fakes.baremetal_port_uuid,
            baremetal_fakes.baremetal_port_address,
            '',
            oscutils.format_dict(baremetal_fakes.baremetal_port_extra),
            baremetal_fakes.baremetal_uuid,
            '',
            '',
            '',
            '',
            '',
            ''
        ), )
        self.assertEqual(datalist, tuple(data))

    def test_baremetal_port_list_fields(self):
        """--fields restricts the server-side field selection."""
        arglist = ['--fields', 'uuid', 'address']
        verifylist = [('fields', [['uuid', 'address']])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        kwargs = {
            'marker': None,
            'limit': None,
            'detail': False,
            'fields': ('uuid', 'address')
        }
        self.baremetal_mock.port.list.assert_called_with(**kwargs)

    def test_baremetal_port_list_fields_multiple(self):
        """Repeated --fields options are flattened into one tuple."""
        arglist = ['--fields', 'uuid', 'address', '--fields', 'extra']
        verifylist = [('fields', [['uuid', 'address'], ['extra']])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        kwargs = {
            'marker': None,
            'limit': None,
            'detail': False,
            'fields': ('uuid', 'address', 'extra')
        }
        self.baremetal_mock.port.list.assert_called_with(**kwargs)

    def test_baremetal_port_list_invalid_fields(self):
        """An unknown field name is rejected at parse time."""
        arglist = ['--fields', 'uuid', 'invalid']
        verifylist = [('fields', [['uuid', 'invalid']])]
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, verifylist)
|
@ -1,747 +0,0 @@
|
||||
#
|
||||
# Copyright 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import copy
|
||||
|
||||
import mock
|
||||
from osc_lib.tests import utils as osctestutils
|
||||
|
||||
from ironicclient.osc.v1 import baremetal_portgroup
|
||||
from ironicclient.tests.unit.osc.v1 import fakes as baremetal_fakes
|
||||
|
||||
|
||||
class TestBaremetalPortGroup(baremetal_fakes.TestBaremetal):
    """Common fixture: a reset baremetal client mock for portgroup tests."""

    def setUp(self):
        super(TestBaremetalPortGroup, self).setUp()

        client_mock = self.app.client_manager.baremetal
        client_mock.reset_mock()
        self.baremetal_mock = client_mock
||||
class TestCreateBaremetalPortGroup(TestBaremetalPortGroup):
    """Tests for the ``baremetal port group create`` command."""

    def setUp(self):
        super(TestCreateBaremetalPortGroup, self).setUp()

        self.baremetal_mock.portgroup.create.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.PORTGROUP),
                loaded=True,
            ))

        # Get the command object to test
        self.cmd = baremetal_portgroup.CreateBaremetalPortGroup(self.app, None)

    def test_baremetal_portgroup_create(self):
        """Creating with only --node passes just node_uuid to the client."""
        arglist = [
            '--node', baremetal_fakes.baremetal_uuid,
        ]

        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        # DisplayCommandBase.take_action() returns two tuples
        self.cmd.take_action(parsed_args)

        # Set expected values
        args = {
            'node_uuid': baremetal_fakes.baremetal_uuid,
        }

        self.baremetal_mock.portgroup.create.assert_called_once_with(**args)

    def test_baremetal_portgroup_create_name_address_uuid(self):
        """All identifying options are forwarded to portgroup.create()."""
        arglist = [
            '--address', baremetal_fakes.baremetal_portgroup_address,
            '--node', baremetal_fakes.baremetal_uuid,
            '--name', baremetal_fakes.baremetal_portgroup_name,
            '--uuid', baremetal_fakes.baremetal_portgroup_uuid,
        ]

        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
            ('address', baremetal_fakes.baremetal_portgroup_address),
            ('name', baremetal_fakes.baremetal_portgroup_name),
            ('uuid', baremetal_fakes.baremetal_portgroup_uuid),
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        # DisplayCommandBase.take_action() returns two tuples
        self.cmd.take_action(parsed_args)

        # Set expected values
        args = {
            'address': baremetal_fakes.baremetal_portgroup_address,
            'node_uuid': baremetal_fakes.baremetal_uuid,
            'name': baremetal_fakes.baremetal_portgroup_name,
            'uuid': baremetal_fakes.baremetal_portgroup_uuid,
        }

        self.baremetal_mock.portgroup.create.assert_called_once_with(**args)

    def test_baremetal_portgroup_create_support_standalone_ports(self):
        """--support-standalone-ports maps to standalone_ports_supported=True."""
        arglist = [
            '--address', baremetal_fakes.baremetal_portgroup_address,
            '--node', baremetal_fakes.baremetal_uuid,
            '--support-standalone-ports'
        ]

        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
            ('address', baremetal_fakes.baremetal_portgroup_address),
            ('support_standalone_ports', True),
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        # DisplayCommandBase.take_action() returns two tuples
        self.cmd.take_action(parsed_args)

        # Set expected values
        args = {
            'address': baremetal_fakes.baremetal_portgroup_address,
            'node_uuid': baremetal_fakes.baremetal_uuid,
            'standalone_ports_supported': True,
        }

        self.baremetal_mock.portgroup.create.assert_called_once_with(**args)

    def test_baremetal_portgroup_create_unsupport_standalone_ports(self):
        """--unsupport-standalone-ports maps to standalone_ports_supported=False."""
        arglist = [
            '--address', baremetal_fakes.baremetal_portgroup_address,
            '--node', baremetal_fakes.baremetal_uuid,
            '--unsupport-standalone-ports'
        ]

        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
            ('address', baremetal_fakes.baremetal_portgroup_address),
            ('unsupport_standalone_ports', True),
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        # DisplayCommandBase.take_action() returns two tuples
        self.cmd.take_action(parsed_args)

        # Set expected values
        args = {
            'address': baremetal_fakes.baremetal_portgroup_address,
            'node_uuid': baremetal_fakes.baremetal_uuid,
            'standalone_ports_supported': False,
        }

        self.baremetal_mock.portgroup.create.assert_called_once_with(**args)

    def test_baremetal_portgroup_create_name_extras(self):
        """Repeated --extra key=value options become the extra dict."""
        arglist = [
            '--address', baremetal_fakes.baremetal_portgroup_address,
            '--node', baremetal_fakes.baremetal_uuid,
            '--name', baremetal_fakes.baremetal_portgroup_name,
            '--extra', 'key1=value1',
            '--extra', 'key2=value2'
        ]

        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
            ('address', baremetal_fakes.baremetal_portgroup_address),
            ('name', baremetal_fakes.baremetal_portgroup_name),
            ('extra', ['key1=value1', 'key2=value2'])
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        # DisplayCommandBase.take_action() returns two tuples
        self.cmd.take_action(parsed_args)

        # Set expected values
        args = {
            'address': baremetal_fakes.baremetal_portgroup_address,
            'node_uuid': baremetal_fakes.baremetal_uuid,
            'name': baremetal_fakes.baremetal_portgroup_name,
            'extra': baremetal_fakes.baremetal_portgroup_extra
        }

        self.baremetal_mock.portgroup.create.assert_called_once_with(**args)

    def test_baremetal_portgroup_create_mode_properties(self):
        """--mode and repeated --property options reach the client call."""
        arglist = [
            '--node', baremetal_fakes.baremetal_uuid,
            '--mode', baremetal_fakes.baremetal_portgroup_mode,
            '--property', 'key1=value11',
            '--property', 'key2=value22'
        ]

        verifylist = [
            ('node_uuid', baremetal_fakes.baremetal_uuid),
            ('mode', baremetal_fakes.baremetal_portgroup_mode),
            ('properties', ['key1=value11', 'key2=value22'])
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        # DisplayCommandBase.take_action() returns two tuples
        self.cmd.take_action(parsed_args)

        # Set expected values
        args = {
            'node_uuid': baremetal_fakes.baremetal_uuid,
            'mode': baremetal_fakes.baremetal_portgroup_mode,
            'properties': baremetal_fakes.baremetal_portgroup_properties
        }

        self.baremetal_mock.portgroup.create.assert_called_once_with(**args)

    def test_baremetal_portgroup_create_no_options(self):
        """Creating without any option is a parser error (--node required)."""
        arglist = []
        verifylist = []

        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, verifylist)
||||
class TestShowBaremetalPortGroup(TestBaremetalPortGroup):
    """Tests for the ``baremetal port group show`` command."""

    def setUp(self):
        super(TestShowBaremetalPortGroup, self).setUp()

        self.baremetal_mock.portgroup.get.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.PORTGROUP),
                loaded=True))

        self.baremetal_mock.portgroup.get_by_address.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.PORTGROUP),
                loaded=True))

        self.cmd = baremetal_portgroup.ShowBaremetalPortGroup(self.app, None)

    def test_baremetal_portgroup_show(self):
        """Showing by UUID calls portgroup.get() and renders all fields."""
        arglist = ['ppp-gggggg-pppp']
        verifylist = [('portgroup', baremetal_fakes.baremetal_portgroup_uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Set expected values
        args = ['ppp-gggggg-pppp']
        self.baremetal_mock.portgroup.get.assert_called_with(*args,
                                                             fields=None)

        collist = ('address', 'extra', 'mode', 'name', 'node_uuid',
                   'properties', 'uuid')
        self.assertEqual(collist, columns)

        datalist = (
            baremetal_fakes.baremetal_portgroup_address,
            baremetal_fakes.baremetal_portgroup_extra,
            baremetal_fakes.baremetal_portgroup_mode,
            baremetal_fakes.baremetal_portgroup_name,
            baremetal_fakes.baremetal_uuid,
            baremetal_fakes.baremetal_portgroup_properties,
            baremetal_fakes.baremetal_portgroup_uuid,
        )

        self.assertEqual(datalist, tuple(data))

    def test_baremetal_portgroup_show_address(self):
        """--address dispatches to portgroup.get_by_address()."""
        arglist = ['--address', baremetal_fakes.baremetal_portgroup_address]
        verifylist = [('address', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        # FIX: this was a set literal ({...}); it only worked because a
        # one-element set unpacks deterministically.  A list expresses the
        # intended positional-argument sequence.
        args = [baremetal_fakes.baremetal_portgroup_address]
        self.baremetal_mock.portgroup.get_by_address.assert_called_with(
            *args, fields=None)

    def test_baremetal_portgroup_show_no_options(self):
        """Showing without an identifier is a parser error."""
        arglist = []
        verifylist = []
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, verifylist)
||||
class TestBaremetalPortGroupList(TestBaremetalPortGroup):
    """Tests for the ``baremetal port group list`` command."""

    def setUp(self):
        super(TestBaremetalPortGroupList, self).setUp()

        # portgroup.list() returns one fake portgroup for every test.
        self.baremetal_mock.portgroup.list.return_value = [
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.PORTGROUP),
                loaded=True)
        ]
        self.cmd = baremetal_portgroup.ListBaremetalPortGroup(self.app, None)

    def test_baremetal_portgroup_list(self):
        """Default listing uses only pagination kwargs and short columns."""
        arglist = []
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        kwargs = {
            'marker': None,
            'limit': None}
        self.baremetal_mock.portgroup.list.assert_called_with(**kwargs)

        collist = (
            "UUID",
            "Address",
            "Name")
        self.assertEqual(collist, columns)

        datalist = ((baremetal_fakes.baremetal_portgroup_uuid,
                     baremetal_fakes.baremetal_portgroup_address,
                     baremetal_fakes.baremetal_portgroup_name),)
        self.assertEqual(datalist, tuple(data))

    def test_baremetal_portgroup_list_address(self):
        """--address is forwarded to portgroup.list() as a filter."""
        arglist = ['--address', baremetal_fakes.baremetal_portgroup_address]
        verifylist = [('address', baremetal_fakes.baremetal_portgroup_address)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        kwargs = {
            'address': baremetal_fakes.baremetal_portgroup_address,
            'marker': None,
            'limit': None}
        self.baremetal_mock.portgroup.list.assert_called_with(**kwargs)

        collist = (
            "UUID",
            "Address",
            "Name")
        self.assertEqual(collist, columns)

        datalist = ((baremetal_fakes.baremetal_portgroup_uuid,
                     baremetal_fakes.baremetal_portgroup_address,
                     baremetal_fakes.baremetal_portgroup_name),)
        self.assertEqual(datalist, tuple(data))

    def test_baremetal_portgroup_list_node(self):
        """--node is forwarded to portgroup.list() as a filter."""
        arglist = ['--node', baremetal_fakes.baremetal_uuid]
        verifylist = [('node', baremetal_fakes.baremetal_uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        kwargs = {
            'node': baremetal_fakes.baremetal_uuid,
            'marker': None,
            'limit': None}
        self.baremetal_mock.portgroup.list.assert_called_with(**kwargs)

        collist = (
            "UUID",
            "Address",
            "Name")
        self.assertEqual(collist, columns)

        datalist = ((baremetal_fakes.baremetal_portgroup_uuid,
                     baremetal_fakes.baremetal_portgroup_address,
                     baremetal_fakes.baremetal_portgroup_name),)
        self.assertEqual(datalist, tuple(data))

    def test_baremetal_portgroup_list_long(self):
        """--long requests a detailed listing with the full column set."""
        arglist = ['--long']
        verifylist = [('detail', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        kwargs = {
            'detail': True,
            'marker': None,
            'limit': None,
        }
        self.baremetal_mock.portgroup.list.assert_called_with(**kwargs)

        collist = ('UUID', 'Address', 'Created At', 'Extra',
                   'Standalone Ports Supported', 'Node UUID', 'Name',
                   'Updated At', 'Internal Info', 'Mode', 'Properties')
        self.assertEqual(collist, columns)

        # Fields absent from the fake resource render as empty strings.
        datalist = ((baremetal_fakes.baremetal_portgroup_uuid,
                     baremetal_fakes.baremetal_portgroup_address,
                     '',
                     baremetal_fakes.baremetal_portgroup_extra,
                     '',
                     baremetal_fakes.baremetal_uuid,
                     baremetal_fakes.baremetal_portgroup_name,
                     '',
                     '',
                     baremetal_fakes.baremetal_portgroup_mode,
                     baremetal_fakes.baremetal_portgroup_properties),)
        self.assertEqual(datalist, tuple(data))

    def test_baremetal_portgroup_list_fields(self):
        """--fields restricts the server-side field selection."""
        arglist = ['--fields', 'uuid', 'address']
        verifylist = [('fields', [['uuid', 'address']])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        kwargs = {
            'marker': None,
            'limit': None,
            'detail': False,
            'fields': ('uuid', 'address')
        }
        self.baremetal_mock.portgroup.list.assert_called_with(**kwargs)

    def test_baremetal_portgroup_list_fields_multiple(self):
        """Repeated --fields options are flattened into one tuple."""
        arglist = ['--fields', 'uuid', 'address', '--fields', 'extra']
        verifylist = [('fields', [['uuid', 'address'], ['extra']])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        kwargs = {
            'marker': None,
            'limit': None,
            'detail': False,
            'fields': ('uuid', 'address', 'extra')
        }
        self.baremetal_mock.portgroup.list.assert_called_with(**kwargs)

    def test_baremetal_portgroup_list_invalid_fields(self):
        """An unknown field name is rejected at parse time."""
        arglist = ['--fields', 'uuid', 'invalid']
        verifylist = [('fields', [['uuid', 'invalid']])]
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, verifylist)
||||
class TestBaremetalPortGroupDelete(TestBaremetalPortGroup):
    """Tests for the ``baremetal port group delete`` command."""

    def setUp(self):
        super(TestBaremetalPortGroupDelete, self).setUp()

        self.baremetal_mock.portgroup.get.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.PORTGROUP),
                loaded=True))

        self.cmd = baremetal_portgroup.DeleteBaremetalPortGroup(self.app, None)

    def test_baremetal_portgroup_delete(self):
        """A single identifier yields a single delete() call."""
        arglist = [baremetal_fakes.baremetal_portgroup_uuid]
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        args = baremetal_fakes.baremetal_portgroup_uuid
        self.baremetal_mock.portgroup.delete.assert_called_with(args)

    def test_baremetal_portgroup_delete_multiple(self):
        """Each listed portgroup gets its own delete() call."""
        arglist = [baremetal_fakes.baremetal_portgroup_uuid,
                   baremetal_fakes.baremetal_portgroup_name]
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        args = [baremetal_fakes.baremetal_portgroup_uuid,
                baremetal_fakes.baremetal_portgroup_name]
        # BUG FIX: Mock.has_calls() is a silent no-op (auto-created child
        # mock); assert_has_calls() performs the real verification.
        self.baremetal_mock.portgroup.delete.assert_has_calls(
            [mock.call(x) for x in args])
        self.assertEqual(2, self.baremetal_mock.portgroup.delete.call_count)

    def test_baremetal_portgroup_delete_no_options(self):
        """Deleting without an identifier is a parser error."""
        arglist = []
        verifylist = []
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, verifylist)
||||
class TestBaremetalPortGroupSet(TestBaremetalPortGroup):
    """Tests for the ``baremetal port group set`` command.

    Each test asserts the exact JSON-patch document sent to
    portgroup.update().
    """

    def setUp(self):
        super(TestBaremetalPortGroupSet, self).setUp()

        self.baremetal_mock.portgroup.update.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.PORTGROUP),
                loaded=True))

        self.cmd = baremetal_portgroup.SetBaremetalPortGroup(self.app, None)

    def test_baremetal_portgroup_set_name(self):
        """--name produces an 'add' patch on /name."""
        new_portgroup_name = 'New-Portgroup-name'
        arglist = [
            baremetal_fakes.baremetal_portgroup_uuid,
            '--name', new_portgroup_name]
        verifylist = [
            ('portgroup', baremetal_fakes.baremetal_portgroup_uuid),
            ('name', new_portgroup_name)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            baremetal_fakes.baremetal_portgroup_uuid,
            [{'path': '/name', 'value': new_portgroup_name, 'op': 'add'}])

    def test_baremetal_portgroup_set_address(self):
        """--address produces an 'add' patch on /address."""
        new_portgroup_address = '00:22:44:66:88:00'
        arglist = [
            baremetal_fakes.baremetal_portgroup_uuid,
            '--address', new_portgroup_address]
        verifylist = [
            ('portgroup', baremetal_fakes.baremetal_portgroup_uuid),
            ('address', new_portgroup_address)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            baremetal_fakes.baremetal_portgroup_uuid,
            [{'path': '/address', 'value': new_portgroup_address,
              'op': 'add'}])

    def test_baremetal_portgroup_set_mode(self):
        """--mode produces an 'add' patch on /mode."""
        new_portgroup_mode = '802.3ad'
        arglist = [
            baremetal_fakes.baremetal_portgroup_uuid,
            '--mode', new_portgroup_mode]
        verifylist = [
            ('portgroup', baremetal_fakes.baremetal_portgroup_uuid),
            ('mode', new_portgroup_mode)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            baremetal_fakes.baremetal_portgroup_uuid,
            [{'path': '/mode', 'value': new_portgroup_mode,
              'op': 'add'}])

    def test_baremetal_portgroup_set_node_uuid(self):
        """--node produces an 'add' patch on /node_uuid."""
        new_node_uuid = 'nnnnnn-uuuuuuuu'
        arglist = [
            baremetal_fakes.baremetal_portgroup_uuid,
            '--node', new_node_uuid]
        verifylist = [
            ('portgroup', baremetal_fakes.baremetal_portgroup_uuid),
            ('node_uuid', new_node_uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            baremetal_fakes.baremetal_portgroup_uuid,
            [{'path': '/node_uuid', 'value': new_node_uuid,
              'op': 'add'}])

    def test_baremetal_portgroup_set_support_standalone_ports(self):
        """The flag patches /standalone_ports_supported with string 'True'."""
        arglist = [
            baremetal_fakes.baremetal_portgroup_uuid,
            '--support-standalone-ports']
        verifylist = [
            ('portgroup', baremetal_fakes.baremetal_portgroup_uuid),
            ('support_standalone_ports', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        # NOTE: the command serializes the boolean as the string 'True'.
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            baremetal_fakes.baremetal_portgroup_uuid,
            [{'path': '/standalone_ports_supported', 'value': 'True',
              'op': 'add'}])

    def test_baremetal_portgroup_set_unsupport_standalone_ports(self):
        """The flag patches /standalone_ports_supported with string 'False'."""
        arglist = [
            baremetal_fakes.baremetal_portgroup_uuid,
            '--unsupport-standalone-ports']
        verifylist = [
            ('portgroup', baremetal_fakes.baremetal_portgroup_uuid),
            ('unsupport_standalone_ports', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            baremetal_fakes.baremetal_portgroup_uuid,
            [{'path': '/standalone_ports_supported', 'value': 'False',
              'op': 'add'}])

    def test_baremetal_set_extra(self):
        """--extra key=value produces an 'add' patch on /extra/<key>."""
        arglist = ['portgroup', '--extra', 'foo=bar']
        verifylist = [('portgroup', 'portgroup'),
                      ('extra', ['foo=bar'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            'portgroup',
            [{'path': '/extra/foo', 'value': 'bar', 'op': 'add'}])

    def test_baremetal_portgroup_set_multiple_extras(self):
        """Repeated --extra options produce one patch entry each."""
        arglist = ['portgroup',
                   '--extra', 'key1=val1',
                   '--extra', 'key2=val2']
        verifylist = [('portgroup', 'portgroup'),
                      ('extra', ['key1=val1', 'key2=val2'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            'portgroup',
            [{'path': '/extra/key1', 'value': 'val1', 'op': 'add'},
             {'path': '/extra/key2', 'value': 'val2', 'op': 'add'}])

    def test_baremetal_portgroup_set_multiple_properties(self):
        """Repeated --property options produce one patch entry each."""
        arglist = ['portgroup',
                   '--property', 'key3=val3',
                   '--property', 'key4=val4']
        verifylist = [('portgroup', 'portgroup'),
                      ('properties', ['key3=val3', 'key4=val4'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            'portgroup',
            [{'path': '/properties/key3', 'value': 'val3', 'op': 'add'},
             {'path': '/properties/key4', 'value': 'val4', 'op': 'add'}])

    def test_baremetal_portgroup_set_no_options(self):
        """Setting without an identifier is a parser error."""
        arglist = []
        verifylist = []
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_baremetal_portgroup_set_no_property(self):
        """An identifier with no attribute to set issues no update()."""
        uuid = baremetal_fakes.baremetal_portgroup_uuid
        arglist = [uuid]
        verifylist = [('portgroup', uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        self.assertFalse(self.baremetal_mock.portgroup.update.called)
||||
class TestBaremetalPortGroupUnset(TestBaremetalPortGroup):
    """Tests for the ``baremetal port group unset`` command.

    Each test asserts the exact 'remove' JSON-patch document sent to
    portgroup.update().
    """

    def setUp(self):
        super(TestBaremetalPortGroupUnset, self).setUp()

        self.baremetal_mock.portgroup.update.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.PORTGROUP),
                loaded=True))

        self.cmd = baremetal_portgroup.UnsetBaremetalPortGroup(self.app, None)

    def test_baremetal_portgroup_unset_extra(self):
        """--extra <key> produces a 'remove' patch on /extra/<key>."""
        arglist = ['portgroup', '--extra', 'key1']
        verifylist = [('portgroup', 'portgroup'),
                      ('extra', ['key1'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            'portgroup',
            [{'path': '/extra/key1', 'op': 'remove'}])

    def test_baremetal_portgroup_unset_name(self):
        """--name produces a 'remove' patch on /name."""
        arglist = ['portgroup', '--name']
        verifylist = [('name', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            'portgroup',
            [{'path': '/name', 'op': 'remove'}])

    def test_baremetal_portgroup_unset_address(self):
        """--address produces a 'remove' patch on /address."""
        arglist = ['portgroup', '--address']
        verifylist = [('address', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            'portgroup',
            [{'path': '/address', 'op': 'remove'}])

    def test_baremetal_portgroup_unset_multiple_extras(self):
        """Repeated --extra options produce one 'remove' entry each."""
        arglist = ['portgroup',
                   '--extra', 'key1',
                   '--extra', 'key2']
        verifylist = [('portgroup', 'portgroup'),
                      ('extra', ['key1', 'key2'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            'portgroup',
            [{'path': '/extra/key1', 'op': 'remove'},
             {'path': '/extra/key2', 'op': 'remove'}])

    def test_baremetal_portgroup_unset_multiple_properties(self):
        """Repeated --property options produce one 'remove' entry each."""
        arglist = ['portgroup',
                   '--property', 'key1',
                   '--property', 'key2']
        verifylist = [('portgroup', 'portgroup'),
                      ('properties', ['key1', 'key2'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.portgroup.update.assert_called_once_with(
            'portgroup',
            [{'path': '/properties/key1', 'op': 'remove'},
             {'path': '/properties/key2', 'op': 'remove'}])

    def test_baremetal_portgroup_unset_no_options(self):
        """Unsetting without an identifier is a parser error."""
        arglist = []
        verifylist = []
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_baremetal_portgroup_unset_no_property(self):
        """An identifier with no attribute to unset issues no update()."""
        uuid = baremetal_fakes.baremetal_portgroup_uuid
        arglist = [uuid]
        verifylist = [('portgroup', uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        self.assertFalse(self.baremetal_mock.portgroup.update.called)
|
@ -1,813 +0,0 @@
|
||||
# Copyright 2017 FUJITSU LIMITED
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
|
||||
import mock
|
||||
from osc_lib.tests import utils as osctestutils
|
||||
|
||||
from ironicclient import exc
|
||||
from ironicclient.osc.v1 import baremetal_volume_connector as bm_vol_connector
|
||||
from ironicclient.tests.unit.osc.v1 import fakes as baremetal_fakes
|
||||
|
||||
|
||||
class TestBaremetalVolumeConnector(baremetal_fakes.TestBaremetal):
|
||||
|
||||
def setUp(self):
|
||||
super(TestBaremetalVolumeConnector, self).setUp()
|
||||
|
||||
self.baremetal_mock = self.app.client_manager.baremetal
|
||||
self.baremetal_mock.reset_mock()
|
||||
|
||||
|
||||
class TestCreateBaremetalVolumeConnector(TestBaremetalVolumeConnector):
|
||||
|
||||
def setUp(self):
|
||||
super(TestCreateBaremetalVolumeConnector, self).setUp()
|
||||
|
||||
self.baremetal_mock.volume_connector.create.return_value = (
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
copy.deepcopy(baremetal_fakes.VOLUME_CONNECTOR),
|
||||
loaded=True,
|
||||
))
|
||||
|
||||
# Get the command object to test
|
||||
self.cmd = (
|
||||
bm_vol_connector.CreateBaremetalVolumeConnector(self.app, None))
|
||||
|
||||
def test_baremetal_volume_connector_create(self):
|
||||
arglist = [
|
||||
'--node', baremetal_fakes.baremetal_uuid,
|
||||
'--type', baremetal_fakes.baremetal_volume_connector_type,
|
||||
'--connector-id',
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
'--uuid', baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
]
|
||||
|
||||
verifylist = [
|
||||
('node_uuid', baremetal_fakes.baremetal_uuid),
|
||||
('type', baremetal_fakes.baremetal_volume_connector_type),
|
||||
('connector_id',
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id),
|
||||
('uuid', baremetal_fakes.baremetal_volume_connector_uuid),
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
args = {
|
||||
'node_uuid': baremetal_fakes.baremetal_uuid,
|
||||
'type': baremetal_fakes.baremetal_volume_connector_type,
|
||||
'connector_id':
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
'uuid': baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
}
|
||||
|
||||
self.baremetal_mock.volume_connector.create.assert_called_once_with(
|
||||
**args)
|
||||
|
||||
def test_baremetal_volume_connector_create_without_uuid(self):
|
||||
arglist = [
|
||||
'--node', baremetal_fakes.baremetal_uuid,
|
||||
'--type', baremetal_fakes.baremetal_volume_connector_type,
|
||||
'--connector-id',
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
]
|
||||
|
||||
verifylist = [
|
||||
('node_uuid', baremetal_fakes.baremetal_uuid),
|
||||
('type', baremetal_fakes.baremetal_volume_connector_type),
|
||||
('connector_id',
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id),
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
args = {
|
||||
'node_uuid': baremetal_fakes.baremetal_uuid,
|
||||
'type': baremetal_fakes.baremetal_volume_connector_type,
|
||||
'connector_id':
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
}
|
||||
|
||||
self.baremetal_mock.volume_connector.create.assert_called_once_with(
|
||||
**args)
|
||||
|
||||
def test_baremetal_volume_connector_create_extras(self):
|
||||
arglist = [
|
||||
'--node', baremetal_fakes.baremetal_uuid,
|
||||
'--type', baremetal_fakes.baremetal_volume_connector_type,
|
||||
'--connector-id',
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
'--extra', 'key1=value1',
|
||||
'--extra', 'key2=value2',
|
||||
]
|
||||
|
||||
verifylist = [
|
||||
('node_uuid', baremetal_fakes.baremetal_uuid),
|
||||
('type', baremetal_fakes.baremetal_volume_connector_type),
|
||||
('connector_id',
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id),
|
||||
('extra', ['key1=value1', 'key2=value2'])
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
args = {
|
||||
'node_uuid': baremetal_fakes.baremetal_uuid,
|
||||
'type': baremetal_fakes.baremetal_volume_connector_type,
|
||||
'connector_id':
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
'extra': baremetal_fakes.baremetal_volume_connector_extra,
|
||||
}
|
||||
|
||||
self.baremetal_mock.volume_connector.create.assert_called_once_with(
|
||||
**args)
|
||||
|
||||
def test_baremetal_volume_connector_create_invalid_type(self):
|
||||
arglist = [
|
||||
'--node', baremetal_fakes.baremetal_uuid,
|
||||
'--type', 'invalid',
|
||||
'--connector-id',
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
'--uuid', baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
]
|
||||
verifylist = None
|
||||
|
||||
self.assertRaises(osctestutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_baremetal_volume_connector_create_missing_node(self):
|
||||
arglist = [
|
||||
'--type', baremetal_fakes.baremetal_volume_connector_type,
|
||||
'--connector-id',
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
'--uuid', baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
]
|
||||
verifylist = None
|
||||
|
||||
self.assertRaises(osctestutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_baremetal_volume_connector_create_missing_type(self):
|
||||
arglist = [
|
||||
'--node', baremetal_fakes.baremetal_uuid,
|
||||
'--connector-id',
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
'--uuid', baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
]
|
||||
verifylist = None
|
||||
|
||||
self.assertRaises(osctestutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_baremetal_volume_connector_create_missing_connector_id(self):
|
||||
arglist = [
|
||||
'--node', baremetal_fakes.baremetal_uuid,
|
||||
'--type', baremetal_fakes.baremetal_volume_connector_type,
|
||||
'--uuid', baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
]
|
||||
verifylist = None
|
||||
|
||||
self.assertRaises(osctestutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
|
||||
class TestShowBaremetalVolumeConnector(TestBaremetalVolumeConnector):
|
||||
|
||||
def setUp(self):
|
||||
super(TestShowBaremetalVolumeConnector, self).setUp()
|
||||
|
||||
self.baremetal_mock.volume_connector.get.return_value = (
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
copy.deepcopy(baremetal_fakes.VOLUME_CONNECTOR),
|
||||
loaded=True))
|
||||
|
||||
self.cmd = (
|
||||
bm_vol_connector.ShowBaremetalVolumeConnector(self.app, None))
|
||||
|
||||
def test_baremetal_volume_connector_show(self):
|
||||
arglist = ['vvv-cccccc-vvvv']
|
||||
verifylist = [('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
args = ['vvv-cccccc-vvvv']
|
||||
self.baremetal_mock.volume_connector.get.assert_called_once_with(
|
||||
*args, fields=None)
|
||||
collist = ('connector_id', 'extra', 'node_uuid', 'type', 'uuid')
|
||||
self.assertEqual(collist, columns)
|
||||
|
||||
datalist = (
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
baremetal_fakes.baremetal_volume_connector_extra,
|
||||
baremetal_fakes.baremetal_uuid,
|
||||
baremetal_fakes.baremetal_volume_connector_type,
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_baremetal_volume_connector_show_no_options(self):
|
||||
arglist = []
|
||||
verifylist = []
|
||||
self.assertRaises(osctestutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_baremetal_volume_connector_show_fields(self):
|
||||
arglist = ['vvv-cccccc-vvvv', '--fields', 'uuid', 'connector_id']
|
||||
verifylist = [('fields', [['uuid', 'connector_id']]),
|
||||
('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid)]
|
||||
|
||||
fake_vc = copy.deepcopy(baremetal_fakes.VOLUME_CONNECTOR)
|
||||
fake_vc.pop('type')
|
||||
fake_vc.pop('extra')
|
||||
fake_vc.pop('node_uuid')
|
||||
self.baremetal_mock.volume_connector.get.return_value = (
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
fake_vc,
|
||||
loaded=True))
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
args = ['vvv-cccccc-vvvv']
|
||||
fields = ['uuid', 'connector_id']
|
||||
self.baremetal_mock.volume_connector.get.assert_called_once_with(
|
||||
*args, fields=fields)
|
||||
collist = ('connector_id', 'uuid')
|
||||
self.assertEqual(collist, columns)
|
||||
|
||||
datalist = (
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_baremetal_volume_connector_show_fields_multiple(self):
|
||||
arglist = ['vvv-cccccc-vvvv', '--fields', 'uuid', 'connector_id',
|
||||
'--fields', 'type']
|
||||
verifylist = [('fields', [['uuid', 'connector_id'], ['type']]),
|
||||
('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid)]
|
||||
|
||||
fake_vc = copy.deepcopy(baremetal_fakes.VOLUME_CONNECTOR)
|
||||
fake_vc.pop('extra')
|
||||
fake_vc.pop('node_uuid')
|
||||
self.baremetal_mock.volume_connector.get.return_value = (
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
fake_vc,
|
||||
loaded=True))
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
args = ['vvv-cccccc-vvvv']
|
||||
fields = ['uuid', 'connector_id', 'type']
|
||||
self.baremetal_mock.volume_connector.get.assert_called_once_with(
|
||||
*args, fields=fields)
|
||||
collist = ('connector_id', 'type', 'uuid')
|
||||
self.assertEqual(collist, columns)
|
||||
|
||||
datalist = (
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
baremetal_fakes.baremetal_volume_connector_type,
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
|
||||
class TestListBaremetalVolumeConnector(TestBaremetalVolumeConnector):
|
||||
def setUp(self):
|
||||
super(TestListBaremetalVolumeConnector, self).setUp()
|
||||
|
||||
self.baremetal_mock.volume_connector.list.return_value = [
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
copy.deepcopy(baremetal_fakes.VOLUME_CONNECTOR),
|
||||
loaded=True)
|
||||
]
|
||||
self.cmd = (
|
||||
bm_vol_connector.ListBaremetalVolumeConnector(self.app, None))
|
||||
|
||||
def test_baremetal_volume_connector_list(self):
|
||||
arglist = []
|
||||
verifylist = []
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
kwargs = {
|
||||
'marker': None,
|
||||
'limit': None}
|
||||
self.baremetal_mock.volume_connector.list.assert_called_once_with(
|
||||
**kwargs)
|
||||
|
||||
collist = (
|
||||
"UUID",
|
||||
"Node UUID",
|
||||
"Type",
|
||||
"Connector ID")
|
||||
self.assertEqual(collist, columns)
|
||||
|
||||
datalist = ((baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
baremetal_fakes.baremetal_uuid,
|
||||
baremetal_fakes.baremetal_volume_connector_type,
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id),)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_baremetal_volume_connector_list_node(self):
|
||||
arglist = ['--node', baremetal_fakes.baremetal_uuid]
|
||||
verifylist = [('node', baremetal_fakes.baremetal_uuid)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
kwargs = {
|
||||
'node': baremetal_fakes.baremetal_uuid,
|
||||
'marker': None,
|
||||
'limit': None}
|
||||
self.baremetal_mock.volume_connector.list.assert_called_once_with(
|
||||
**kwargs)
|
||||
|
||||
collist = (
|
||||
"UUID",
|
||||
"Node UUID",
|
||||
"Type",
|
||||
"Connector ID")
|
||||
self.assertEqual(collist, columns)
|
||||
|
||||
datalist = ((baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
baremetal_fakes.baremetal_uuid,
|
||||
baremetal_fakes.baremetal_volume_connector_type,
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id),)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_baremetal_volume_connector_list_long(self):
|
||||
arglist = ['--long']
|
||||
verifylist = [('detail', True)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
kwargs = {
|
||||
'detail': True,
|
||||
'marker': None,
|
||||
'limit': None,
|
||||
}
|
||||
self.baremetal_mock.volume_connector.list.assert_called_with(**kwargs)
|
||||
|
||||
collist = ('UUID', 'Node UUID', 'Type', 'Connector ID', 'Extra',
|
||||
'Created At', 'Updated At')
|
||||
self.assertEqual(collist, columns)
|
||||
|
||||
datalist = ((baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
baremetal_fakes.baremetal_uuid,
|
||||
baremetal_fakes.baremetal_volume_connector_type,
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
baremetal_fakes.baremetal_volume_connector_extra,
|
||||
'',
|
||||
''),)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_baremetal_volume_connector_list_fields(self):
|
||||
arglist = ['--fields', 'uuid', 'connector_id']
|
||||
verifylist = [('fields', [['uuid', 'connector_id']])]
|
||||
|
||||
fake_vc = copy.deepcopy(baremetal_fakes.VOLUME_CONNECTOR)
|
||||
fake_vc.pop('type')
|
||||
fake_vc.pop('extra')
|
||||
fake_vc.pop('node_uuid')
|
||||
self.baremetal_mock.volume_connector.list.return_value = [
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
fake_vc,
|
||||
loaded=True)
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
kwargs = {
|
||||
'detail': False,
|
||||
'marker': None,
|
||||
'limit': None,
|
||||
'fields': ('uuid', 'connector_id')
|
||||
}
|
||||
self.baremetal_mock.volume_connector.list.assert_called_with(**kwargs)
|
||||
|
||||
collist = ('UUID', 'Connector ID')
|
||||
self.assertEqual(collist, columns)
|
||||
|
||||
datalist = ((baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id),)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_baremetal_volume_connector_list_fields_multiple(self):
|
||||
arglist = ['--fields', 'uuid', 'connector_id', '--fields', 'extra']
|
||||
verifylist = [('fields', [['uuid', 'connector_id'], ['extra']])]
|
||||
|
||||
fake_vc = copy.deepcopy(baremetal_fakes.VOLUME_CONNECTOR)
|
||||
fake_vc.pop('type')
|
||||
fake_vc.pop('node_uuid')
|
||||
self.baremetal_mock.volume_connector.list.return_value = [
|
||||
baremetal_fakes.FakeBaremetalResource(
|
||||
None,
|
||||
fake_vc,
|
||||
loaded=True)
|
||||
]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
columns, data = self.cmd.take_action(parsed_args)
|
||||
|
||||
kwargs = {
|
||||
'detail': False,
|
||||
'marker': None,
|
||||
'limit': None,
|
||||
'fields': ('uuid', 'connector_id', 'extra')
|
||||
}
|
||||
self.baremetal_mock.volume_connector.list.assert_called_with(**kwargs)
|
||||
|
||||
collist = ('UUID', 'Connector ID', 'Extra')
|
||||
self.assertEqual(collist, columns)
|
||||
|
||||
datalist = ((baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
baremetal_fakes.baremetal_volume_connector_connector_id,
|
||||
baremetal_fakes.baremetal_volume_connector_extra),)
|
||||
self.assertEqual(datalist, tuple(data))
|
||||
|
||||
def test_baremetal_volume_connector_list_invalid_fields(self):
|
||||
arglist = ['--fields', 'uuid', 'invalid']
|
||||
verifylist = [('fields', [['uuid', 'invalid']])]
|
||||
self.assertRaises(osctestutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_baremetal_volume_connector_list_marker(self):
|
||||
arglist = ['--marker', baremetal_fakes.baremetal_volume_connector_uuid]
|
||||
verifylist = [
|
||||
('marker', baremetal_fakes.baremetal_volume_connector_uuid)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
kwargs = {
|
||||
'marker': baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
'limit': None}
|
||||
self.baremetal_mock.volume_connector.list.assert_called_once_with(
|
||||
**kwargs)
|
||||
|
||||
def test_baremetal_volume_connector_list_limit(self):
|
||||
arglist = ['--limit', '10']
|
||||
verifylist = [('limit', 10)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
kwargs = {
|
||||
'marker': None,
|
||||
'limit': 10}
|
||||
self.baremetal_mock.volume_connector.list.assert_called_once_with(
|
||||
**kwargs)
|
||||
|
||||
def test_baremetal_volume_connector_list_sort(self):
|
||||
arglist = ['--sort', 'type']
|
||||
verifylist = [('sort', 'type')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
kwargs = {
|
||||
'marker': None,
|
||||
'limit': None}
|
||||
self.baremetal_mock.volume_connector.list.assert_called_once_with(
|
||||
**kwargs)
|
||||
|
||||
def test_baremetal_volume_connector_list_sort_desc(self):
|
||||
arglist = ['--sort', 'type:desc']
|
||||
verifylist = [('sort', 'type:desc')]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
kwargs = {
|
||||
'marker': None,
|
||||
'limit': None}
|
||||
self.baremetal_mock.volume_connector.list.assert_called_once_with(
|
||||
**kwargs)
|
||||
|
||||
def test_baremetal_volume_connector_list_exclusive_options(self):
|
||||
arglist = ['--fields', 'uuid', '--long']
|
||||
self.assertRaises(osctestutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, [])
|
||||
|
||||
def test_baremetal_volume_connector_list_negative_limit(self):
|
||||
arglist = ['--limit', '-1']
|
||||
verifylist = [('limit', -1)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.assertRaises(exc.CommandError,
|
||||
self.cmd.take_action,
|
||||
parsed_args)
|
||||
|
||||
|
||||
class TestDeleteBaremetalVolumeConnector(TestBaremetalVolumeConnector):
|
||||
|
||||
def setUp(self):
|
||||
super(TestDeleteBaremetalVolumeConnector, self).setUp()
|
||||
|
||||
self.cmd = (
|
||||
bm_vol_connector.DeleteBaremetalVolumeConnector(self.app, None))
|
||||
|
||||
def test_baremetal_volume_connector_delete(self):
|
||||
arglist = [baremetal_fakes.baremetal_volume_connector_uuid]
|
||||
verifylist = [('volume_connectors',
|
||||
[baremetal_fakes.baremetal_volume_connector_uuid])]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.baremetal_mock.volume_connector.delete.assert_called_with(
|
||||
baremetal_fakes.baremetal_volume_connector_uuid)
|
||||
|
||||
def test_baremetal_volume_connector_delete_multiple(self):
|
||||
fake_volume_connector_uuid2 = 'vvv-cccccc-cccc'
|
||||
arglist = [baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
fake_volume_connector_uuid2]
|
||||
verifylist = [('volume_connectors',
|
||||
[baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
fake_volume_connector_uuid2])]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.cmd.take_action(parsed_args)
|
||||
|
||||
self.baremetal_mock.volume_connector.delete.has_calls(
|
||||
[mock.call(baremetal_fakes.baremetal_volume_connector_uuid),
|
||||
mock.call(fake_volume_connector_uuid2)])
|
||||
self.assertEqual(
|
||||
2, self.baremetal_mock.volume_connector.delete.call_count)
|
||||
|
||||
def test_baremetal_volume_connector_delete_no_options(self):
|
||||
arglist = []
|
||||
verifylist = []
|
||||
self.assertRaises(osctestutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_baremetal_volume_connector_delete_error(self):
|
||||
arglist = [baremetal_fakes.baremetal_volume_connector_uuid]
|
||||
verifylist = [('volume_connectors',
|
||||
[baremetal_fakes.baremetal_volume_connector_uuid])]
|
||||
|
||||
self.baremetal_mock.volume_connector.delete.side_effect = (
|
||||
exc.NotFound())
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.assertRaises(exc.ClientException,
|
||||
self.cmd.take_action,
|
||||
parsed_args)
|
||||
self.baremetal_mock.volume_connector.delete.assert_called_with(
|
||||
baremetal_fakes.baremetal_volume_connector_uuid)
|
||||
|
||||
def test_baremetal_volume_connector_delete_multiple_error(self):
|
||||
fake_volume_connector_uuid2 = 'vvv-cccccc-cccc'
|
||||
arglist = [baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
fake_volume_connector_uuid2]
|
||||
verifylist = [('volume_connectors',
|
||||
[baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
fake_volume_connector_uuid2])]
|
||||
|
||||
self.baremetal_mock.volume_connector.delete.side_effect = [
|
||||
None, exc.NotFound()]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.assertRaises(exc.ClientException,
|
||||
self.cmd.take_action,
|
||||
parsed_args)
|
||||
|
||||
self.baremetal_mock.volume_connector.delete.has_calls(
|
||||
[mock.call(baremetal_fakes.baremetal_volume_connector_uuid),
|
||||
mock.call(fake_volume_connector_uuid2)])
|
||||
self.assertEqual(
|
||||
2, self.baremetal_mock.volume_connector.delete.call_count)
|
||||
|
||||
|
||||
class TestSetBaremetalVolumeConnector(TestBaremetalVolumeConnector):
|
||||
def setUp(self):
|
||||
super(TestSetBaremetalVolumeConnector, self).setUp()
|
||||
|
||||
self.cmd = (
|
||||
bm_vol_connector.SetBaremetalVolumeConnector(self.app, None))
|
||||
|
||||
def test_baremetal_volume_connector_set_node_uuid(self):
|
||||
new_node_uuid = 'xxx-xxxxxx-zzzz'
|
||||
arglist = [
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
'--node', new_node_uuid]
|
||||
verifylist = [
|
||||
('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid),
|
||||
('node_uuid', new_node_uuid)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.baremetal_mock.volume_connector.update.assert_called_once_with(
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
[{'path': '/node_uuid', 'value': new_node_uuid, 'op': 'add'}])
|
||||
|
||||
def test_baremetal_volume_connector_set_type(self):
|
||||
new_type = 'wwnn'
|
||||
arglist = [
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
'--type', new_type]
|
||||
verifylist = [
|
||||
('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid),
|
||||
('type', new_type)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.baremetal_mock.volume_connector.update.assert_called_once_with(
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
[{'path': '/type', 'value': new_type, 'op': 'add'}])
|
||||
|
||||
def test_baremetal_volume_connector_set_invalid_type(self):
|
||||
new_type = 'invalid'
|
||||
arglist = [
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
'--type', new_type]
|
||||
verifylist = None
|
||||
|
||||
self.assertRaises(osctestutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_baremetal_volume_connector_set_connector_id(self):
|
||||
new_conn_id = '11:22:33:44:55:66:77:88'
|
||||
arglist = [
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
'--connector-id', new_conn_id]
|
||||
verifylist = [
|
||||
('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid),
|
||||
('connector_id', new_conn_id)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.baremetal_mock.volume_connector.update.assert_called_once_with(
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
[{'path': '/connector_id', 'value': new_conn_id, 'op': 'add'}])
|
||||
|
||||
def test_baremetal_volume_connector_set_type_and_connector_id(self):
|
||||
new_type = 'wwnn'
|
||||
new_conn_id = '11:22:33:44:55:66:77:88'
|
||||
arglist = [
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
'--type', new_type,
|
||||
'--connector-id', new_conn_id]
|
||||
verifylist = [
|
||||
('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid),
|
||||
('type', new_type),
|
||||
('connector_id', new_conn_id)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.baremetal_mock.volume_connector.update.assert_called_once_with(
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
[{'path': '/type', 'value': new_type, 'op': 'add'},
|
||||
{'path': '/connector_id', 'value': new_conn_id, 'op': 'add'}])
|
||||
|
||||
def test_baremetal_volume_connector_set_extra(self):
|
||||
arglist = [
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
'--extra', 'foo=bar']
|
||||
verifylist = [
|
||||
('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid),
|
||||
('extra', ['foo=bar'])]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.baremetal_mock.volume_connector.update.assert_called_once_with(
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
[{'path': '/extra/foo', 'value': 'bar', 'op': 'add'}])
|
||||
|
||||
def test_baremetal_volume_connector_set_multiple_extras(self):
|
||||
arglist = [
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
'--extra', 'key1=val1', '--extra', 'key2=val2']
|
||||
verifylist = [
|
||||
('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid),
|
||||
('extra', ['key1=val1', 'key2=val2'])]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.baremetal_mock.volume_connector.update.assert_called_once_with(
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
[{'path': '/extra/key1', 'value': 'val1', 'op': 'add'},
|
||||
{'path': '/extra/key2', 'value': 'val2', 'op': 'add'}])
|
||||
|
||||
def test_baremetal_volume_connector_set_no_options(self):
|
||||
arglist = []
|
||||
verifylist = []
|
||||
self.assertRaises(osctestutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_baremetal_volume_connector_set_no_property(self):
|
||||
arglist = [baremetal_fakes.baremetal_volume_connector_uuid]
|
||||
verifylist = [('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.baremetal_mock.volume_connector.update.assert_not_called()
|
||||
|
||||
|
||||
class TestUnsetBaremetalVolumeConnector(TestBaremetalVolumeConnector):
|
||||
def setUp(self):
|
||||
super(TestUnsetBaremetalVolumeConnector, self).setUp()
|
||||
|
||||
self.cmd = (
|
||||
bm_vol_connector.UnsetBaremetalVolumeConnector(self.app, None))
|
||||
|
||||
def test_baremetal_volume_connector_unset_extra(self):
|
||||
arglist = [baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
'--extra', 'key1']
|
||||
verifylist = [('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid),
|
||||
('extra', ['key1'])]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.baremetal_mock.volume_connector.update.assert_called_once_with(
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
[{'path': '/extra/key1', 'op': 'remove'}])
|
||||
|
||||
def test_baremetal_volume_connector_unset_multiple_extras(self):
|
||||
arglist = [baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
'--extra', 'key1', '--extra', 'key2']
|
||||
verifylist = [('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid),
|
||||
('extra', ['key1', 'key2'])]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.baremetal_mock.volume_connector.update.assert_called_once_with(
|
||||
baremetal_fakes.baremetal_volume_connector_uuid,
|
||||
[{'path': '/extra/key1', 'op': 'remove'},
|
||||
{'path': '/extra/key2', 'op': 'remove'}])
|
||||
|
||||
def test_baremetal_volume_connector_unset_no_options(self):
|
||||
arglist = []
|
||||
verifylist = []
|
||||
self.assertRaises(osctestutils.ParserException,
|
||||
self.check_parser,
|
||||
self.cmd, arglist, verifylist)
|
||||
|
||||
def test_baremetal_volume_connector_unset_no_property(self):
|
||||
arglist = [baremetal_fakes.baremetal_volume_connector_uuid]
|
||||
verifylist = [('volume_connector',
|
||||
baremetal_fakes.baremetal_volume_connector_uuid)]
|
||||
|
||||
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
|
||||
self.cmd.take_action(parsed_args)
|
||||
self.baremetal_mock.volume_connector.update.assert_not_called()
|
@ -1,977 +0,0 @@
|
||||
# Copyright 2017 FUJITSU LIMITED
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
|
||||
import mock
|
||||
from osc_lib.tests import utils as osctestutils
|
||||
|
||||
from ironicclient import exc
|
||||
from ironicclient.osc.v1 import baremetal_volume_target as bm_vol_target
|
||||
from ironicclient.tests.unit.osc.v1 import fakes as baremetal_fakes
|
||||
|
||||
|
||||
class TestBaremetalVolumeTarget(baremetal_fakes.TestBaremetal):
|
||||
|
||||
def setUp(self):
|
||||
super(TestBaremetalVolumeTarget, self).setUp()
|
||||
|
||||
self.baremetal_mock = self.app.client_manager.baremetal
|
||||
self.baremetal_mock.reset_mock()
|
||||
|
||||
|
||||
class TestCreateBaremetalVolumeTarget(TestBaremetalVolumeTarget):
    """Tests for the ``baremetal volume target create`` command."""

    def setUp(self):
        super(TestCreateBaremetalVolumeTarget, self).setUp()

        # Fake resource returned by a successful create call.
        self.baremetal_mock.volume_target.create.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.VOLUME_TARGET),
                loaded=True,
            ))

        # Command under test.
        self.cmd = (
            bm_vol_target.CreateBaremetalVolumeTarget(self.app, None))

    def test_baremetal_volume_target_create(self):
        """All options supplied, including an explicit UUID."""
        fakes = baremetal_fakes
        arglist = [
            '--node', fakes.baremetal_uuid,
            '--type', fakes.baremetal_volume_target_volume_type,
            '--boot-index', fakes.baremetal_volume_target_boot_index,
            '--volume-id', fakes.baremetal_volume_target_volume_id,
            '--uuid', fakes.baremetal_volume_target_uuid,
        ]
        verifylist = [
            ('node_uuid', fakes.baremetal_uuid),
            ('volume_type', fakes.baremetal_volume_target_volume_type),
            ('boot_index', fakes.baremetal_volume_target_boot_index),
            ('volume_id', fakes.baremetal_volume_target_volume_id),
            ('uuid', fakes.baremetal_volume_target_uuid),
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        expected = {
            'node_uuid': fakes.baremetal_uuid,
            'volume_type': fakes.baremetal_volume_target_volume_type,
            'boot_index': fakes.baremetal_volume_target_boot_index,
            'volume_id': fakes.baremetal_volume_target_volume_id,
            'uuid': fakes.baremetal_volume_target_uuid,
        }
        self.baremetal_mock.volume_target.create.assert_called_once_with(
            **expected)

    def test_baremetal_volume_target_create_without_uuid(self):
        """UUID is optional; the create call must not receive one."""
        fakes = baremetal_fakes
        arglist = [
            '--node', fakes.baremetal_uuid,
            '--type', fakes.baremetal_volume_target_volume_type,
            '--boot-index', fakes.baremetal_volume_target_boot_index,
            '--volume-id', fakes.baremetal_volume_target_volume_id,
        ]
        verifylist = [
            ('node_uuid', fakes.baremetal_uuid),
            ('volume_type', fakes.baremetal_volume_target_volume_type),
            ('boot_index', fakes.baremetal_volume_target_boot_index),
            ('volume_id', fakes.baremetal_volume_target_volume_id),
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        expected = {
            'node_uuid': fakes.baremetal_uuid,
            'volume_type': fakes.baremetal_volume_target_volume_type,
            'boot_index': fakes.baremetal_volume_target_boot_index,
            'volume_id': fakes.baremetal_volume_target_volume_id,
        }
        self.baremetal_mock.volume_target.create.assert_called_once_with(
            **expected)

    def test_baremetal_volume_target_create_extras(self):
        """Repeated --extra options are collected and forwarded."""
        fakes = baremetal_fakes
        arglist = [
            '--node', fakes.baremetal_uuid,
            '--type', fakes.baremetal_volume_target_volume_type,
            '--boot-index', fakes.baremetal_volume_target_boot_index,
            '--volume-id', fakes.baremetal_volume_target_volume_id,
            '--extra', 'key1=value1',
            '--extra', 'key2=value2',
        ]
        verifylist = [
            ('node_uuid', fakes.baremetal_uuid),
            ('volume_type', fakes.baremetal_volume_target_volume_type),
            ('boot_index', fakes.baremetal_volume_target_boot_index),
            ('volume_id', fakes.baremetal_volume_target_volume_id),
            ('extra', ['key1=value1', 'key2=value2'])
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        expected = {
            'node_uuid': fakes.baremetal_uuid,
            'volume_type': fakes.baremetal_volume_target_volume_type,
            'boot_index': fakes.baremetal_volume_target_boot_index,
            'volume_id': fakes.baremetal_volume_target_volume_id,
            'extra': fakes.baremetal_volume_target_extra,
        }
        self.baremetal_mock.volume_target.create.assert_called_once_with(
            **expected)

    def _test_baremetal_volume_target_missing_param(self, missing):
        """Parse with every option except *missing* and expect failure."""
        argdict = {
            '--node': baremetal_fakes.baremetal_uuid,
            '--type': baremetal_fakes.baremetal_volume_target_volume_type,
            '--boot-index':
                baremetal_fakes.baremetal_volume_target_boot_index,
            '--volume-id':
                baremetal_fakes.baremetal_volume_target_volume_id,
            '--uuid': baremetal_fakes.baremetal_volume_target_uuid,
        }

        arglist = []
        for opt, value in argdict.items():
            if opt not in missing:
                arglist.extend((opt, value))

        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, None)

    def test_baremetal_volume_target_create_missing_node(self):
        self._test_baremetal_volume_target_missing_param(['--node'])

    def test_baremetal_volume_target_create_missing_type(self):
        self._test_baremetal_volume_target_missing_param(['--type'])

    def test_baremetal_volume_target_create_missing_boot_index(self):
        self._test_baremetal_volume_target_missing_param(['--boot-index'])

    def test_baremetal_volume_target_create_missing_volume_id(self):
        self._test_baremetal_volume_target_missing_param(['--volume-id'])

    def test_baremetal_volume_target_create_invalid_boot_index(self):
        """A non-integer boot index is rejected by the parser."""
        fakes = baremetal_fakes
        arglist = [
            '--node', fakes.baremetal_uuid,
            '--type', fakes.baremetal_volume_target_volume_type,
            '--boot-index', 'string',
            '--volume-id', fakes.baremetal_volume_target_volume_id,
        ]

        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, None)

    def test_baremetal_volume_target_create_negative_boot_index(self):
        """A negative boot index parses but is rejected at execution time."""
        fakes = baremetal_fakes
        arglist = [
            '--node', fakes.baremetal_uuid,
            '--type', fakes.baremetal_volume_target_volume_type,
            '--boot-index', '-1',
            '--volume-id', fakes.baremetal_volume_target_volume_id,
        ]
        verifylist = [
            ('node_uuid', fakes.baremetal_uuid),
            ('volume_type', fakes.baremetal_volume_target_volume_type),
            ('boot_index', -1),
            ('volume_id', fakes.baremetal_volume_target_volume_id),
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.assertRaises(exc.CommandError, self.cmd.take_action, parsed_args)
|
||||
|
||||
|
||||
class TestShowBaremetalVolumeTarget(TestBaremetalVolumeTarget):
    """Tests for the ``baremetal volume target show`` command."""

    def setUp(self):
        super(TestShowBaremetalVolumeTarget, self).setUp()

        # Fake resource returned by a successful get call.
        self.baremetal_mock.volume_target.get.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.VOLUME_TARGET),
                loaded=True))

        # Command under test.
        self.cmd = (
            bm_vol_target.ShowBaremetalVolumeTarget(self.app, None))

    def test_baremetal_volume_target_show(self):
        """Default show returns every field in alphabetical order."""
        parsed_args = self.check_parser(
            self.cmd,
            ['vvv-tttttt-vvvv'],
            [('volume_target',
              baremetal_fakes.baremetal_volume_target_uuid)])
        columns, data = self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.get.assert_called_once_with(
            'vvv-tttttt-vvvv', fields=None)

        self.assertEqual(
            ('boot_index', 'extra', 'node_uuid', 'properties', 'uuid',
             'volume_id', 'volume_type'),
            columns)
        self.assertEqual(
            (baremetal_fakes.baremetal_volume_target_boot_index,
             baremetal_fakes.baremetal_volume_target_extra,
             baremetal_fakes.baremetal_uuid,
             baremetal_fakes.baremetal_volume_target_properties,
             baremetal_fakes.baremetal_volume_target_uuid,
             baremetal_fakes.baremetal_volume_target_volume_id,
             baremetal_fakes.baremetal_volume_target_volume_type),
            tuple(data))

    def test_baremetal_volume_target_show_no_options(self):
        """The positional volume target argument is mandatory."""
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])

    def test_baremetal_volume_target_show_fields(self):
        """--fields restricts both the API request and the output."""
        arglist = ['vvv-tttttt-vvvv', '--fields', 'uuid', 'volume_id']
        verifylist = [('fields', [['uuid', 'volume_id']]),
                      ('volume_target',
                       baremetal_fakes.baremetal_volume_target_uuid)]

        # Simulate the server returning only the requested fields.
        fake_vt = copy.deepcopy(baremetal_fakes.VOLUME_TARGET)
        for absent in ('node_uuid', 'volume_type', 'boot_index', 'extra',
                       'properties'):
            fake_vt.pop(absent)
        self.baremetal_mock.volume_target.get.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                fake_vt,
                loaded=True))

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.get.assert_called_once_with(
            'vvv-tttttt-vvvv', fields=['uuid', 'volume_id'])

        self.assertEqual(('uuid', 'volume_id'), columns)
        self.assertEqual(
            (baremetal_fakes.baremetal_volume_target_uuid,
             baremetal_fakes.baremetal_volume_target_volume_id),
            tuple(data))

    def test_baremetal_volume_target_show_fields_multiple(self):
        """Repeated --fields options are merged into one request."""
        arglist = ['vvv-tttttt-vvvv', '--fields', 'uuid', 'volume_id',
                   '--fields', 'volume_type']
        verifylist = [('fields', [['uuid', 'volume_id'], ['volume_type']]),
                      ('volume_target',
                       baremetal_fakes.baremetal_volume_target_uuid)]

        fake_vt = copy.deepcopy(baremetal_fakes.VOLUME_TARGET)
        for absent in ('node_uuid', 'boot_index', 'extra', 'properties'):
            fake_vt.pop(absent)
        self.baremetal_mock.volume_target.get.return_value = (
            baremetal_fakes.FakeBaremetalResource(
                None,
                fake_vt,
                loaded=True))

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.get.assert_called_once_with(
            'vvv-tttttt-vvvv', fields=['uuid', 'volume_id', 'volume_type'])

        self.assertEqual(('uuid', 'volume_id', 'volume_type'), columns)
        self.assertEqual(
            (baremetal_fakes.baremetal_volume_target_uuid,
             baremetal_fakes.baremetal_volume_target_volume_id,
             baremetal_fakes.baremetal_volume_target_volume_type),
            tuple(data))

    def test_baremetal_volume_target_show_invalid_fields(self):
        """Unknown field names are rejected by the parser."""
        arglist = ['vvv-tttttt-vvvv', '--fields', 'uuid', 'invalid']

        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, None)
|
||||
|
||||
|
||||
class TestListBaremetalVolumeTarget(TestBaremetalVolumeTarget):
    """Tests for the ``baremetal volume target list`` command."""

    def setUp(self):
        super(TestListBaremetalVolumeTarget, self).setUp()

        # One fake resource in the listing by default.
        self.baremetal_mock.volume_target.list.return_value = [
            baremetal_fakes.FakeBaremetalResource(
                None,
                copy.deepcopy(baremetal_fakes.VOLUME_TARGET),
                loaded=True)
        ]
        # Command under test.
        self.cmd = (
            bm_vol_target.ListBaremetalVolumeTarget(self.app, None))

    def test_baremetal_volume_target_list(self):
        """Default listing shows the summary column set."""
        parsed_args = self.check_parser(self.cmd, [], [])
        columns, data = self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.list.assert_called_once_with(
            marker=None, limit=None)

        self.assertEqual(
            ("UUID", "Node UUID", "Driver Volume Type", "Boot Index",
             "Volume ID"),
            columns)
        self.assertEqual(
            ((baremetal_fakes.baremetal_volume_target_uuid,
              baremetal_fakes.baremetal_uuid,
              baremetal_fakes.baremetal_volume_target_volume_type,
              baremetal_fakes.baremetal_volume_target_boot_index,
              baremetal_fakes.baremetal_volume_target_volume_id),),
            tuple(data))

    def test_baremetal_volume_target_list_node(self):
        """--node is forwarded as a filter to the client."""
        parsed_args = self.check_parser(
            self.cmd,
            ['--node', baremetal_fakes.baremetal_uuid],
            [('node', baremetal_fakes.baremetal_uuid)])
        columns, data = self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.list.assert_called_once_with(
            node=baremetal_fakes.baremetal_uuid, marker=None, limit=None)

        self.assertEqual(
            ("UUID", "Node UUID", "Driver Volume Type", "Boot Index",
             "Volume ID"),
            columns)
        self.assertEqual(
            ((baremetal_fakes.baremetal_volume_target_uuid,
              baremetal_fakes.baremetal_uuid,
              baremetal_fakes.baremetal_volume_target_volume_type,
              baremetal_fakes.baremetal_volume_target_boot_index,
              baremetal_fakes.baremetal_volume_target_volume_id),),
            tuple(data))

    def test_baremetal_volume_target_list_long(self):
        """--long requests detail and adds the extended columns."""
        parsed_args = self.check_parser(
            self.cmd, ['--long'], [('detail', True)])
        columns, data = self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.list.assert_called_with(
            detail=True, marker=None, limit=None)

        self.assertEqual(
            ('UUID', 'Node UUID', 'Driver Volume Type', 'Properties',
             'Boot Index', 'Extra', 'Volume ID', 'Created At',
             'Updated At'),
            columns)
        self.assertEqual(
            ((baremetal_fakes.baremetal_volume_target_uuid,
              baremetal_fakes.baremetal_uuid,
              baremetal_fakes.baremetal_volume_target_volume_type,
              baremetal_fakes.baremetal_volume_target_properties,
              baremetal_fakes.baremetal_volume_target_boot_index,
              baremetal_fakes.baremetal_volume_target_extra,
              baremetal_fakes.baremetal_volume_target_volume_id,
              '',
              ''),),
            tuple(data))

    def test_baremetal_volume_target_list_fields(self):
        """--fields restricts the API request and the printed columns."""
        arglist = ['--fields', 'uuid', 'boot_index']
        verifylist = [('fields', [['uuid', 'boot_index']])]

        # Simulate the server returning only the requested fields.
        fake_vt = copy.deepcopy(baremetal_fakes.VOLUME_TARGET)
        for absent in ('volume_type', 'extra', 'properties', 'volume_id',
                       'node_uuid'):
            fake_vt.pop(absent)
        self.baremetal_mock.volume_target.list.return_value = [
            baremetal_fakes.FakeBaremetalResource(
                None,
                fake_vt,
                loaded=True)
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.list.assert_called_with(
            detail=False, marker=None, limit=None,
            fields=('uuid', 'boot_index'))

        self.assertEqual(('UUID', 'Boot Index'), columns)
        self.assertEqual(
            ((baremetal_fakes.baremetal_volume_target_uuid,
              baremetal_fakes.baremetal_volume_target_boot_index),),
            tuple(data))

    def test_baremetal_volume_target_list_fields_multiple(self):
        """Repeated --fields options are merged into one request."""
        arglist = ['--fields', 'uuid', 'boot_index', '--fields', 'extra']
        verifylist = [('fields', [['uuid', 'boot_index'], ['extra']])]

        fake_vt = copy.deepcopy(baremetal_fakes.VOLUME_TARGET)
        for absent in ('volume_type', 'properties', 'volume_id',
                       'node_uuid'):
            fake_vt.pop(absent)
        self.baremetal_mock.volume_target.list.return_value = [
            baremetal_fakes.FakeBaremetalResource(
                None,
                fake_vt,
                loaded=True)
        ]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.list.assert_called_with(
            detail=False, marker=None, limit=None,
            fields=('uuid', 'boot_index', 'extra'))

        self.assertEqual(('UUID', 'Boot Index', 'Extra'), columns)
        self.assertEqual(
            ((baremetal_fakes.baremetal_volume_target_uuid,
              baremetal_fakes.baremetal_volume_target_boot_index,
              baremetal_fakes.baremetal_volume_target_extra),),
            tuple(data))

    def test_baremetal_volume_target_list_invalid_fields(self):
        """Unknown field names are rejected by the parser."""
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd,
                          ['--fields', 'uuid', 'invalid'],
                          [('fields', [['uuid', 'invalid']])])

    def test_baremetal_volume_target_list_marker(self):
        """--marker is forwarded to the client."""
        marker = baremetal_fakes.baremetal_volume_target_uuid
        parsed_args = self.check_parser(
            self.cmd, ['--marker', marker], [('marker', marker)])
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.list.assert_called_once_with(
            marker=marker, limit=None)

    def test_baremetal_volume_target_list_limit(self):
        """--limit is forwarded to the client."""
        parsed_args = self.check_parser(
            self.cmd, ['--limit', '10'], [('limit', 10)])
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.list.assert_called_once_with(
            marker=None, limit=10)

    def test_baremetal_volume_target_list_sort(self):
        """--sort is a client-side option; the API call is unchanged."""
        parsed_args = self.check_parser(
            self.cmd, ['--sort', 'boot_index'], [('sort', 'boot_index')])
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.list.assert_called_once_with(
            marker=None, limit=None)

    def test_baremetal_volume_target_list_sort_desc(self):
        """Descending sort is also handled client-side."""
        parsed_args = self.check_parser(
            self.cmd,
            ['--sort', 'boot_index:desc'],
            [('sort', 'boot_index:desc')])
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.list.assert_called_once_with(
            marker=None, limit=None)

    def test_baremetal_volume_target_list_exclusive_options(self):
        """--fields and --long are mutually exclusive."""
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, ['--fields', 'uuid', '--long'], [])

    def test_baremetal_volume_target_list_negative_limit(self):
        """A negative limit parses but is rejected at execution time."""
        parsed_args = self.check_parser(
            self.cmd, ['--limit', '-1'], [('limit', -1)])

        self.assertRaises(exc.CommandError,
                          self.cmd.take_action,
                          parsed_args)
|
||||
|
||||
|
||||
class TestDeleteBaremetalVolumeTarget(TestBaremetalVolumeTarget):
    """Tests for the ``baremetal volume target delete`` command."""

    def setUp(self):
        super(TestDeleteBaremetalVolumeTarget, self).setUp()

        self.cmd = bm_vol_target.DeleteBaremetalVolumeTarget(self.app, None)

    def test_baremetal_volume_target_delete(self):
        """Deleting a single target issues one delete call."""
        arglist = [baremetal_fakes.baremetal_volume_target_uuid]
        verifylist = [('volume_targets',
                       [baremetal_fakes.baremetal_volume_target_uuid])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.delete.assert_called_with(
            baremetal_fakes.baremetal_volume_target_uuid)

    def test_baremetal_volume_target_delete_multiple(self):
        """Deleting several targets issues one delete call per UUID."""
        fake_volume_target_uuid2 = 'vvv-tttttt-tttt'
        arglist = [baremetal_fakes.baremetal_volume_target_uuid,
                   fake_volume_target_uuid2]
        verifylist = [('volume_targets',
                       [baremetal_fakes.baremetal_volume_target_uuid,
                        fake_volume_target_uuid2])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        # BUG FIX: the original code called ``delete.has_calls(...)``.
        # ``has_calls`` is not a mock assertion -- attribute access on a
        # Mock just auto-creates a child mock, so the statement silently
        # verified nothing.  ``assert_has_calls`` performs the intended
        # check.
        self.baremetal_mock.volume_target.delete.assert_has_calls(
            [mock.call(baremetal_fakes.baremetal_volume_target_uuid),
             mock.call(fake_volume_target_uuid2)])
        self.assertEqual(
            2, self.baremetal_mock.volume_target.delete.call_count)

    def test_baremetal_volume_target_delete_no_options(self):
        """At least one volume target UUID is required."""
        arglist = []
        verifylist = []
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_baremetal_volume_target_delete_error(self):
        """A NotFound from the client surfaces as a ClientException."""
        arglist = [baremetal_fakes.baremetal_volume_target_uuid]
        verifylist = [('volume_targets',
                       [baremetal_fakes.baremetal_volume_target_uuid])]

        self.baremetal_mock.volume_target.delete.side_effect = (
            exc.NotFound())

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.assertRaises(exc.ClientException,
                          self.cmd.take_action,
                          parsed_args)
        self.baremetal_mock.volume_target.delete.assert_called_with(
            baremetal_fakes.baremetal_volume_target_uuid)

    def test_baremetal_volume_target_delete_multiple_error(self):
        """A partial failure still attempts every delete before raising."""
        fake_volume_target_uuid2 = 'vvv-tttttt-tttt'
        arglist = [baremetal_fakes.baremetal_volume_target_uuid,
                   fake_volume_target_uuid2]
        verifylist = [('volume_targets',
                       [baremetal_fakes.baremetal_volume_target_uuid,
                        fake_volume_target_uuid2])]

        # First delete succeeds, the second raises NotFound.
        self.baremetal_mock.volume_target.delete.side_effect = [
            None, exc.NotFound()]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.assertRaises(exc.ClientException,
                          self.cmd.take_action,
                          parsed_args)

        # BUG FIX: same ``has_calls`` -> ``assert_has_calls`` fix as in
        # test_baremetal_volume_target_delete_multiple above.
        self.baremetal_mock.volume_target.delete.assert_has_calls(
            [mock.call(baremetal_fakes.baremetal_volume_target_uuid),
             mock.call(fake_volume_target_uuid2)])
        self.assertEqual(
            2, self.baremetal_mock.volume_target.delete.call_count)
|
||||
|
||||
|
||||
class TestSetBaremetalVolumeTarget(TestBaremetalVolumeTarget):
    """Tests for the ``baremetal volume target set`` command."""

    def setUp(self):
        super(TestSetBaremetalVolumeTarget, self).setUp()

        # Command under test.
        self.cmd = (
            bm_vol_target.SetBaremetalVolumeTarget(self.app, None))

    def test_baremetal_volume_target_set_node_uuid(self):
        """--node becomes a JSON-patch "add" on /node_uuid."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid
        new_node_uuid = 'xxx-xxxxxx-zzzz'
        parsed_args = self.check_parser(
            self.cmd,
            [vt_uuid, '--node', new_node_uuid],
            [('volume_target', vt_uuid), ('node_uuid', new_node_uuid)])

        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.update.assert_called_once_with(
            vt_uuid,
            [{'path': '/node_uuid', 'value': new_node_uuid, 'op': 'add'}])

    def test_baremetal_volume_target_set_volume_type(self):
        """--type becomes a JSON-patch "add" on /volume_type."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid
        new_type = 'fibre_channel'
        parsed_args = self.check_parser(
            self.cmd,
            [vt_uuid, '--type', new_type],
            [('volume_target', vt_uuid), ('volume_type', new_type)])

        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.update.assert_called_once_with(
            vt_uuid,
            [{'path': '/volume_type', 'value': new_type, 'op': 'add'}])

    def test_baremetal_volume_target_set_boot_index(self):
        """--boot-index is parsed as an int and patched onto /boot_index."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid
        new_boot_idx = '3'
        parsed_args = self.check_parser(
            self.cmd,
            [vt_uuid, '--boot-index', new_boot_idx],
            [('volume_target', vt_uuid),
             ('boot_index', int(new_boot_idx))])

        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.update.assert_called_once_with(
            vt_uuid,
            [{'path': '/boot_index', 'value': int(new_boot_idx),
              'op': 'add'}])

    def test_baremetal_volume_target_set_negative_boot_index(self):
        """A negative boot index parses but fails at execution time."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid
        new_boot_idx = '-3'
        parsed_args = self.check_parser(
            self.cmd,
            [vt_uuid, '--boot-index', new_boot_idx],
            [('volume_target', vt_uuid),
             ('boot_index', int(new_boot_idx))])

        self.assertRaises(exc.CommandError, self.cmd.take_action, parsed_args)

    def test_baremetal_volume_target_set_invalid_boot_index(self):
        """A non-integer boot index is rejected by the parser."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid

        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd,
                          [vt_uuid, '--boot-index', 'string'],
                          None)

    def test_baremetal_volume_target_set_volume_id(self):
        """--volume-id becomes a JSON-patch "add" on /volume_id."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid
        new_volume_id = 'new-volume-id'
        parsed_args = self.check_parser(
            self.cmd,
            [vt_uuid, '--volume-id', new_volume_id],
            [('volume_target', vt_uuid), ('volume_id', new_volume_id)])

        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.update.assert_called_once_with(
            vt_uuid,
            [{'path': '/volume_id', 'value': new_volume_id, 'op': 'add'}])

    def test_baremetal_volume_target_set_volume_type_and_volume_id(self):
        """Multiple options are combined into a single patch list."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid
        new_volume_type = 'fibre_channel'
        new_volume_id = 'new-volume-id'
        parsed_args = self.check_parser(
            self.cmd,
            [vt_uuid, '--type', new_volume_type,
             '--volume-id', new_volume_id],
            [('volume_target', vt_uuid),
             ('volume_type', new_volume_type),
             ('volume_id', new_volume_id)])

        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.update.assert_called_once_with(
            vt_uuid,
            [{'path': '/volume_type', 'value': new_volume_type, 'op': 'add'},
             {'path': '/volume_id', 'value': new_volume_id, 'op': 'add'}])

    def test_baremetal_volume_target_set_extra(self):
        """--extra key=value patches /extra/<key>."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid
        parsed_args = self.check_parser(
            self.cmd,
            [vt_uuid, '--extra', 'foo=bar'],
            [('volume_target', vt_uuid), ('extra', ['foo=bar'])])

        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.update.assert_called_once_with(
            vt_uuid,
            [{'path': '/extra/foo', 'value': 'bar', 'op': 'add'}])

    def test_baremetal_volume_target_set_multiple_extras(self):
        """Repeated --extra options each produce a patch entry."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid
        parsed_args = self.check_parser(
            self.cmd,
            [vt_uuid, '--extra', 'key1=val1', '--extra', 'key2=val2'],
            [('volume_target', vt_uuid),
             ('extra', ['key1=val1', 'key2=val2'])])

        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.update.assert_called_once_with(
            vt_uuid,
            [{'path': '/extra/key1', 'value': 'val1', 'op': 'add'},
             {'path': '/extra/key2', 'value': 'val2', 'op': 'add'}])

    def test_baremetal_volume_target_set_property(self):
        """--property key=value patches /properties/<key>."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid
        parsed_args = self.check_parser(
            self.cmd,
            [vt_uuid, '--property', 'foo=bar'],
            [('volume_target', vt_uuid), ('properties', ['foo=bar'])])

        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.update.assert_called_once_with(
            vt_uuid,
            [{'path': '/properties/foo', 'value': 'bar', 'op': 'add'}])

    def test_baremetal_volume_target_set_multiple_properties(self):
        """Repeated --property options each produce a patch entry."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid
        parsed_args = self.check_parser(
            self.cmd,
            [vt_uuid, '--property', 'key1=val1', '--property', 'key2=val2'],
            [('volume_target', vt_uuid),
             ('properties', ['key1=val1', 'key2=val2'])])

        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.update.assert_called_once_with(
            vt_uuid,
            [{'path': '/properties/key1', 'value': 'val1', 'op': 'add'},
             {'path': '/properties/key2', 'value': 'val2', 'op': 'add'}])

    def test_baremetal_volume_target_set_no_options(self):
        """The positional volume target argument is mandatory."""
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, [], [])

    def test_baremetal_volume_target_set_no_property(self):
        """With only the UUID given there is nothing to set: no update."""
        vt_uuid = baremetal_fakes.baremetal_volume_target_uuid
        parsed_args = self.check_parser(
            self.cmd, [vt_uuid], [('volume_target', vt_uuid)])
        self.cmd.take_action(parsed_args)

        self.baremetal_mock.volume_target.update.assert_not_called()
|
||||
|
||||
|
||||
class TestUnsetBaremetalVolumeTarget(TestBaremetalVolumeTarget):
    """Tests for the 'baremetal volume target unset' OSC command.

    Each test parses a fake command line via ``check_parser`` and then
    verifies the JSON-patch 'remove' operations sent to the mocked
    ``volume_target.update()`` call.
    """

    def setUp(self):
        super(TestUnsetBaremetalVolumeTarget, self).setUp()

        # Command under test; mocked app/client come from the base class.
        self.cmd = bm_vol_target.UnsetBaremetalVolumeTarget(self.app, None)

    def test_baremetal_volume_target_unset_extra(self):
        """A single --extra key becomes one '/extra/<key>' remove patch."""
        arglist = [baremetal_fakes.baremetal_volume_target_uuid,
                   '--extra', 'key1']
        verifylist = [('volume_target',
                       baremetal_fakes.baremetal_volume_target_uuid),
                      ('extra', ['key1'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.volume_target.update.assert_called_once_with(
            baremetal_fakes.baremetal_volume_target_uuid,
            [{'path': '/extra/key1', 'op': 'remove'}])

    def test_baremetal_volume_target_unset_multiple_extras(self):
        """Multiple --extra keys produce one remove patch per key."""
        arglist = [baremetal_fakes.baremetal_volume_target_uuid,
                   '--extra', 'key1', '--extra', 'key2']
        verifylist = [('volume_target',
                       baremetal_fakes.baremetal_volume_target_uuid),
                      ('extra', ['key1', 'key2'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.volume_target.update.assert_called_once_with(
            baremetal_fakes.baremetal_volume_target_uuid,
            [{'path': '/extra/key1', 'op': 'remove'},
             {'path': '/extra/key2', 'op': 'remove'}])

    def test_baremetal_volume_target_unset_property(self):
        """A single --property key becomes one '/properties/<key>' remove."""
        arglist = [baremetal_fakes.baremetal_volume_target_uuid,
                   '--property', 'key11']
        verifylist = [('volume_target',
                       baremetal_fakes.baremetal_volume_target_uuid),
                      ('properties', ['key11'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.volume_target.update.assert_called_once_with(
            baremetal_fakes.baremetal_volume_target_uuid,
            [{'path': '/properties/key11', 'op': 'remove'}])

    def test_baremetal_volume_target_unset_multiple_properties(self):
        """Multiple --property keys produce one remove patch per key."""
        arglist = [baremetal_fakes.baremetal_volume_target_uuid,
                   '--property', 'key11', '--property', 'key22']
        verifylist = [('volume_target',
                       baremetal_fakes.baremetal_volume_target_uuid),
                      ('properties', ['key11', 'key22'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)
        self.baremetal_mock.volume_target.update.assert_called_once_with(
            baremetal_fakes.baremetal_volume_target_uuid,
            [{'path': '/properties/key11', 'op': 'remove'},
             {'path': '/properties/key22', 'op': 'remove'}])

    def test_baremetal_volume_target_unset_no_options(self):
        """No arguments at all is a parser error."""
        arglist = []
        verifylist = []
        self.assertRaises(osctestutils.ParserException,
                          self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_baremetal_volume_target_unset_no_property(self):
        """Only a target and nothing to unset: no update call is made."""
        arglist = [baremetal_fakes.baremetal_volume_target_uuid]
        verifylist = [('volume_target',
                       baremetal_fakes.baremetal_volume_target_uuid)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        self.baremetal_mock.volume_target.update.assert_not_called()
|
@ -1,373 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from keystoneauth1 import loading as kaloading
|
||||
|
||||
from ironicclient import client as iroclient
|
||||
from ironicclient.common import filecache
|
||||
from ironicclient.common import http
|
||||
from ironicclient import exc
|
||||
from ironicclient.tests.unit import utils
|
||||
from ironicclient.v1 import client as v1
|
||||
|
||||
|
||||
class ClientTest(utils.BaseTestCase):
    """Tests for ``ironicclient.client.get_client`` argument plumbing.

    The keystoneauth session/loader machinery is mocked out; the tests
    assert how CLI-style ``os_*`` keyword arguments are translated into
    keystoneauth calls and into the ``Client`` constructor.
    """

    def test_get_client_with_auth_token_ironic_url(self):
        """An explicit endpoint plus token bypasses keystone entirely."""
        kwargs = {
            'ironic_url': 'http://ironic.example.org:6385/',
            'os_auth_token': 'USER_AUTH_TOKEN',
        }
        client = iroclient.get_client('1', **kwargs)

        self.assertEqual('USER_AUTH_TOKEN', client.http_client.auth_token)
        self.assertEqual('http://ironic.example.org:6385/',
                         client.http_client.endpoint)

    @mock.patch.object(filecache, 'retrieve_data', autospec=True)
    @mock.patch.object(kaloading.session, 'Session', autospec=True)
    @mock.patch.object(kaloading, 'get_plugin_loader', autospec=True)
    def _test_get_client(self, mock_ks_loader, mock_ks_session,
                         mock_retrieve_data, version=None,
                         auth='password', **kwargs):
        # Shared helper: run get_client() with the keystoneauth loader,
        # session and API-version file cache mocked, then verify the
        # loader/session/endpoint plumbing for the given kwargs.
        session = mock_ks_session.return_value.load_from_options.return_value
        session.get_endpoint.return_value = 'http://localhost:6385/v1/f14b4123'
        mock_ks_loader.return_value.load_from_options.return_value = 'auth'
        mock_retrieve_data.return_value = version

        client = iroclient.get_client('1', **kwargs)

        mock_ks_loader.assert_called_once_with(auth)
        mock_ks_session.return_value.load_from_options.assert_called_once_with(
            auth='auth', timeout=kwargs.get('timeout'),
            insecure=kwargs.get('insecure'), cert=kwargs.get('cert'),
            cacert=kwargs.get('cacert'), key=kwargs.get('key'))
        session.get_endpoint.assert_called_once_with(
            service_type=kwargs.get('os_service_type') or 'baremetal',
            interface=kwargs.get('os_endpoint_type') or 'publicURL',
            region_name=kwargs.get('os_region_name'))
        if 'os_ironic_api_version' in kwargs:
            # An explicitly requested API version must skip the cache.
            self.assertEqual(0, mock_retrieve_data.call_count)
        else:
            mock_retrieve_data.assert_called_once_with(
                host='localhost',
                port='6385')
            self.assertEqual(version or v1.DEFAULT_VER,
                             client.http_client.os_ironic_api_version)

    def test_get_client_no_auth_token(self):
        """Password auth is selected when the token is empty."""
        kwargs = {
            'os_project_name': 'PROJECT_NAME',
            'os_username': 'USERNAME',
            'os_password': 'PASSWORD',
            'os_auth_url': 'http://localhost:35357/v2.0',
            'os_auth_token': '',
        }
        self._test_get_client(**kwargs)

    def test_get_client_service_and_endpoint_type_defaults(self):
        """Empty service/endpoint type fall back to the defaults."""
        kwargs = {
            'os_project_name': 'PROJECT_NAME',
            'os_username': 'USERNAME',
            'os_password': 'PASSWORD',
            'os_auth_url': 'http://localhost:35357/v2.0',
            'os_auth_token': '',
            'os_service_type': '',
            'os_endpoint_type': ''
        }
        self._test_get_client(**kwargs)

    def test_get_client_with_region_no_auth_token(self):
        """Region name is forwarded to the endpoint lookup."""
        kwargs = {
            'os_project_name': 'PROJECT_NAME',
            'os_username': 'USERNAME',
            'os_password': 'PASSWORD',
            'os_region_name': 'REGIONONE',
            'os_auth_url': 'http://localhost:35357/v2.0',
            'os_auth_token': '',
        }
        self._test_get_client(**kwargs)

    def test_get_client_no_url(self):
        """Neither auth URL nor endpoint: auth system is ambiguous."""
        kwargs = {
            'os_project_name': 'PROJECT_NAME',
            'os_username': 'USERNAME',
            'os_password': 'PASSWORD',
            'os_auth_url': '',
        }
        self.assertRaises(exc.AmbiguousAuthSystem, iroclient.get_client,
                          '1', **kwargs)
        # test the alias as well to ensure backwards compatibility
        self.assertRaises(exc.AmbigiousAuthSystem, iroclient.get_client,
                          '1', **kwargs)

    def test_get_client_incorrect_auth_params(self):
        """Missing password (and token) also raises AmbiguousAuthSystem."""
        kwargs = {
            'os_project_name': 'PROJECT_NAME',
            'os_username': 'USERNAME',
            'os_auth_url': 'http://localhost:35357/v2.0',
        }
        self.assertRaises(exc.AmbiguousAuthSystem, iroclient.get_client,
                          '1', **kwargs)

    def test_get_client_with_api_version_latest(self):
        """The literal string 'latest' is accepted as an API version."""
        kwargs = {
            'os_project_name': 'PROJECT_NAME',
            'os_username': 'USERNAME',
            'os_password': 'PASSWORD',
            'os_auth_url': 'http://localhost:35357/v2.0',
            'os_auth_token': '',
            'os_ironic_api_version': "latest",
        }
        self._test_get_client(**kwargs)

    def test_get_client_with_api_version_numeric(self):
        """A numeric microversion string is accepted as an API version."""
        kwargs = {
            'os_project_name': 'PROJECT_NAME',
            'os_username': 'USERNAME',
            'os_password': 'PASSWORD',
            'os_auth_url': 'http://localhost:35357/v2.0',
            'os_auth_token': '',
            'os_ironic_api_version': "1.4",
        }
        self._test_get_client(**kwargs)

    def test_get_client_default_version_set_cached(self):
        """A version cached on disk overrides the library default."""
        version = '1.3'
        # Make sure we don't coincidentally succeed
        self.assertNotEqual(v1.DEFAULT_VER, version)
        kwargs = {
            'os_project_name': 'PROJECT_NAME',
            'os_username': 'USERNAME',
            'os_password': 'PASSWORD',
            'os_auth_url': 'http://localhost:35357/v2.0',
            'os_auth_token': '',
        }
        self._test_get_client(version=version, **kwargs)

    def test_get_client_with_auth_token(self):
        """A non-empty token selects the 'token' auth plugin."""
        kwargs = {
            'os_auth_url': 'http://localhost:35357/v2.0',
            'os_auth_token': 'USER_AUTH_TOKEN',
        }
        self._test_get_client(auth='token', **kwargs)

    def test_get_client_with_region_name_auth_token(self):
        """Token auth works together with an explicit region name."""
        kwargs = {
            'os_auth_url': 'http://localhost:35357/v2.0',
            'os_region_name': 'REGIONONE',
            'os_auth_token': 'USER_AUTH_TOKEN',
        }
        self._test_get_client(auth='token', **kwargs)

    def test_get_client_only_session_passed(self):
        """A caller-supplied session is used for the endpoint lookup."""
        session = mock.Mock()
        session.get_endpoint.return_value = 'http://localhost:35357/v2.0'
        kwargs = {
            'session': session,
        }
        iroclient.get_client('1', **kwargs)
        session.get_endpoint.assert_called_once_with(service_type='baremetal',
                                                     interface='publicURL',
                                                     region_name=None)

    def test_get_client_incorrect_session_passed(self):
        """A session whose endpoint lookup fails raises the auth error."""
        session = mock.Mock()
        session.get_endpoint.side_effect = Exception('boo')
        kwargs = {
            'session': session,
        }
        self.assertRaises(exc.AmbiguousAuthSystem, iroclient.get_client,
                          '1', **kwargs)

    @mock.patch.object(kaloading.session, 'Session', autospec=True)
    @mock.patch.object(kaloading, 'get_plugin_loader', autospec=True)
    def _test_loader_arguments_passed_correctly(
            self, mock_ks_loader, mock_ks_session,
            passed_kwargs, expected_kwargs):
        # Shared helper: assert how os_* CLI options are translated into
        # keystoneauth plugin-loader options.
        session = mock_ks_session.return_value.load_from_options.return_value
        session.get_endpoint.return_value = 'http://localhost:6385/v1/f14b4123'
        mock_ks_loader.return_value.load_from_options.return_value = 'auth'

        iroclient.get_client('1', **passed_kwargs)

        mock_ks_loader.return_value.load_from_options.assert_called_once_with(
            **expected_kwargs)
        mock_ks_session.return_value.load_from_options.assert_called_once_with(
            auth='auth', timeout=passed_kwargs.get('timeout'),
            insecure=passed_kwargs.get('insecure'),
            cert=passed_kwargs.get('cert'),
            cacert=passed_kwargs.get('cacert'), key=passed_kwargs.get('key'))
        session.get_endpoint.assert_called_once_with(
            service_type=passed_kwargs.get('os_service_type') or 'baremetal',
            interface=passed_kwargs.get('os_endpoint_type') or 'publicURL',
            region_name=passed_kwargs.get('os_region_name'))

    def test_loader_arguments_token(self):
        """Token kwargs map to the keystone 'token' plugin options."""
        passed_kwargs = {
            'os_auth_url': 'http://localhost:35357/v3',
            'os_region_name': 'REGIONONE',
            'os_auth_token': 'USER_AUTH_TOKEN',
        }
        expected_kwargs = {
            'auth_url': 'http://localhost:35357/v3',
            'project_id': None,
            'project_name': None,
            'user_domain_id': None,
            'user_domain_name': None,
            'project_domain_id': None,
            'project_domain_name': None,
            'token': 'USER_AUTH_TOKEN'
        }
        self._test_loader_arguments_passed_correctly(
            passed_kwargs=passed_kwargs, expected_kwargs=expected_kwargs)

    def test_loader_arguments_password_tenant_name(self):
        """Password auth by project name with domain ids."""
        passed_kwargs = {
            'os_auth_url': 'http://localhost:35357/v3',
            'os_region_name': 'REGIONONE',
            'os_project_name': 'PROJECT',
            'os_username': 'user',
            'os_password': '1234',
            'os_project_domain_id': 'DEFAULT',
            'os_user_domain_id': 'DEFAULT'
        }
        expected_kwargs = {
            'auth_url': 'http://localhost:35357/v3',
            'project_id': None,
            'project_name': 'PROJECT',
            'user_domain_id': 'DEFAULT',
            'user_domain_name': None,
            'project_domain_id': 'DEFAULT',
            'project_domain_name': None,
            'username': 'user',
            'password': '1234'
        }
        self._test_loader_arguments_passed_correctly(
            passed_kwargs=passed_kwargs, expected_kwargs=expected_kwargs)

    def test_loader_arguments_password_project_id(self):
        """Password auth by project id with domain names."""
        passed_kwargs = {
            'os_auth_url': 'http://localhost:35357/v3',
            'os_region_name': 'REGIONONE',
            'os_project_id': '1000',
            'os_username': 'user',
            'os_password': '1234',
            'os_project_domain_name': 'domain1',
            'os_user_domain_name': 'domain1'
        }
        expected_kwargs = {
            'auth_url': 'http://localhost:35357/v3',
            'project_id': '1000',
            'project_name': None,
            'user_domain_id': None,
            'user_domain_name': 'domain1',
            'project_domain_id': None,
            'project_domain_name': 'domain1',
            'username': 'user',
            'password': '1234'
        }
        self._test_loader_arguments_passed_correctly(
            passed_kwargs=passed_kwargs, expected_kwargs=expected_kwargs)

    @mock.patch.object(iroclient, 'Client', autospec=True)
    @mock.patch.object(kaloading.session, 'Session', autospec=True)
    def test_correct_arguments_passed_to_client_constructor_noauth_mode(
            self, mock_ks_session, mock_client):
        """Noauth mode (explicit URL + token) never builds a session."""
        kwargs = {
            'ironic_url': 'http://ironic.example.org:6385/',
            'os_auth_token': 'USER_AUTH_TOKEN',
            'os_ironic_api_version': 'latest',
            'insecure': True,
            'max_retries': 10,
            'retry_interval': 10,
            'os_cacert': 'data'
        }
        iroclient.get_client('1', **kwargs)
        mock_client.assert_called_once_with(
            '1', 'http://ironic.example.org:6385/',
            **{
                'os_ironic_api_version': 'latest',
                'max_retries': 10,
                'retry_interval': 10,
                'token': 'USER_AUTH_TOKEN',
                'insecure': True,
                'ca_file': 'data',
                'cert_file': None,
                'key_file': None,
                'timeout': None,
                'session': None
            }
        )
        self.assertFalse(mock_ks_session.called)

    @mock.patch.object(iroclient, 'Client', autospec=True)
    @mock.patch.object(kaloading.session, 'Session', autospec=True)
    def test_correct_arguments_passed_to_client_constructor_session_created(
            self, mock_ks_session, mock_client):
        """When credentials are given, a session is created and passed on."""
        session = mock_ks_session.return_value.load_from_options.return_value
        kwargs = {
            'os_auth_url': 'http://localhost:35357/v3',
            'os_region_name': 'REGIONONE',
            'os_project_id': '1000',
            'os_username': 'user',
            'os_password': '1234',
            'os_project_domain_name': 'domain1',
            'os_user_domain_name': 'domain1'
        }
        iroclient.get_client('1', **kwargs)
        mock_client.assert_called_once_with(
            '1', session.get_endpoint.return_value,
            **{
                'os_ironic_api_version': None,
                'max_retries': None,
                'retry_interval': None,
                'session': session,
            }
        )

    @mock.patch.object(iroclient, 'Client', autospec=True)
    @mock.patch.object(kaloading.session, 'Session', autospec=True)
    def test_correct_arguments_passed_to_client_constructor_session_passed(
            self, mock_ks_session, mock_client):
        """A pre-built session is forwarded untouched; none is created."""
        session = mock.Mock()
        kwargs = {
            'session': session,
        }
        iroclient.get_client('1', **kwargs)
        mock_client.assert_called_once_with(
            '1', session.get_endpoint.return_value,
            **{
                'os_ironic_api_version': None,
                'max_retries': None,
                'retry_interval': None,
                'session': session,
            }
        )
        self.assertFalse(mock_ks_session.called)

    def test_safe_header_with_auth_token(self):
        """X-Auth-Token values are redacted to a SHA1 digest in logs."""
        (name, value) = ('X-Auth-Token', u'3b640e2e64d946ac8f55615aff221dc1')
        expected_header = (u'X-Auth-Token',
                           '{SHA1}6de9fb3b0b89099030a54abfeb468e7b1b1f0f2b')
        client = http.HTTPClient('http://localhost/')
        header_redact = client._process_header(name, value)
        self.assertEqual(expected_header, header_redact)

    def test_safe_header_with_no_auth_token(self):
        """Non-sensitive headers pass through unchanged."""
        name, value = ('Accept', 'application/json')
        header = ('Accept', 'application/json')
        client = http.HTTPClient('http://localhost/')
        header_redact = client._process_header(name, value)
        self.assertEqual(header, header_redact)
|
@ -1,59 +0,0 @@
|
||||
# Copyright 2015 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
from six.moves import http_client
|
||||
|
||||
from ironicclient.common.apiclient import exceptions
|
||||
from ironicclient import exc
|
||||
from ironicclient.tests.unit import utils as test_utils
|
||||
|
||||
|
||||
@mock.patch.object(exceptions, 'from_response', autospec=True)
class ExcTest(test_utils.BaseTestCase):
    """Tests for ``exc.from_response`` normalizing HTTP responses.

    The downstream ``exceptions.from_response`` is mocked on every test
    (class decorator), so each test receives a ``mock_apiclient`` arg.
    """

    def setUp(self):
        super(ExcTest, self).setUp()
        self.message = 'SpongeBob SquarePants'
        self.traceback = 'Foo Traceback'
        self.method = 'call_spongebob'
        self.url = 'http://foo.bar'
        # Body that exc.from_response is expected to synthesize.
        self.expected_json = {'error': {'message': self.message,
                                        'details': self.traceback}}

    def test_from_response(self, mock_apiclient):
        """A requests-style response (has status_code) passes through."""
        fake_response = mock.Mock(status_code=http_client.BAD_REQUEST)
        exc.from_response(fake_response, message=self.message,
                          traceback=self.traceback, method=self.method,
                          url=self.url)
        self.assertEqual(http_client.BAD_REQUEST, fake_response.status_code)
        self.assertEqual(self.expected_json, fake_response.json())
        mock_apiclient.assert_called_once_with(
            fake_response, method=self.method, url=self.url)

    def test_from_response_status(self, mock_apiclient):
        """An httplib-style response (status + getheader) is adapted."""
        fake_response = mock.Mock(status=http_client.BAD_REQUEST)
        fake_response.getheader.return_value = 'fake-header'
        # Remove the requests-style attribute so the fallback path runs.
        delattr(fake_response, 'status_code')

        exc.from_response(fake_response, message=self.message,
                          traceback=self.traceback, method=self.method,
                          url=self.url)
        expected_header = {'Content-Type': 'fake-header'}
        self.assertEqual(expected_header, fake_response.headers)
        self.assertEqual(http_client.BAD_REQUEST, fake_response.status_code)
        self.assertEqual(self.expected_json, fake_response.json())
        mock_apiclient.assert_called_once_with(
            fake_response, method=self.method, url=self.url)
|
@ -1,37 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironicclient.tests.unit import utils
|
||||
|
||||
# Dotted name of the package under test; imported dynamically below.
module_str = 'ironicclient'
|
||||
|
||||
|
||||
class ImportTest(utils.BaseTestCase):
    """Verify the public symbols exported by the top-level package."""

    def check_exported_symbols(self, exported_symbols):
        # The package must expose its client entry point and both
        # exception modules regardless of how it was imported.
        self.assertIn('client', exported_symbols)
        self.assertIn('exc', exported_symbols)
        self.assertIn('exceptions', exported_symbols)

    def test_import_objects(self):
        """Plain __import__ exposes the expected names via dir()."""
        module = __import__(module_str)
        exported_symbols = dir(module)
        self.check_exported_symbols(exported_symbols)

    def test_default_import(self):
        """'from pkg import *'-style import exposes the same names."""
        default_imports = __import__(module_str, globals(), locals(), ['*'])
        exported_symbols = dir(default_imports)
        self.check_exported_symbols(exported_symbols)

    def test_import__all__(self):
        """__all__ itself lists the expected public names."""
        module = __import__(module_str)
        self.check_exported_symbols(module.__all__)
|
@ -1,398 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import re
|
||||
import sys
|
||||
|
||||
import fixtures
|
||||
from keystoneauth1 import exceptions as keystone_exc
|
||||
from keystoneauth1 import fixture as ks_fixture
|
||||
import mock
|
||||
from oslo_utils import uuidutils
|
||||
import requests_mock
|
||||
import six
|
||||
import testtools
|
||||
from testtools import matchers
|
||||
|
||||
from ironicclient import client
|
||||
from ironicclient.common.apiclient import exceptions
|
||||
from ironicclient.common import http
|
||||
from ironicclient import exc
|
||||
from ironicclient import shell as ironic_shell
|
||||
from ironicclient.tests.unit import utils
|
||||
|
||||
# Fake keystone endpoints used by the shell tests below.
BASE_URL = 'http://no.where:5000'
V2_URL = BASE_URL + '/v2.0'
V3_URL = BASE_URL + '/v3'

# Minimal environment for password authentication against keystone v2.
FAKE_ENV = {'OS_USERNAME': 'username',
            'OS_PASSWORD': 'password',
            'OS_PROJECT_NAME': 'project_name',
            'OS_AUTH_URL': V2_URL}

# Same credentials, spelled out for tests that want a v2-specific dict.
FAKE_ENV_KEYSTONE_V2 = {
    'OS_USERNAME': 'username',
    'OS_PASSWORD': 'password',
    'OS_PROJECT_NAME': 'project_name',
    'OS_AUTH_URL': V2_URL
}

# Keystone v3 additionally requires user/project domain ids.
FAKE_ENV_KEYSTONE_V3 = {
    'OS_USERNAME': 'username',
    'OS_PASSWORD': 'password',
    'OS_PROJECT_NAME': 'project_name',
    'OS_AUTH_URL': V3_URL,
    'OS_USER_DOMAIN_ID': 'default',
    'OS_PROJECT_DOMAIN_ID': 'default',
}

# Token-based (rather than password-based) authentication environment.
FAKE_ENV_KEYSTONE_V2_TOKEN = {
    'OS_AUTH_TOKEN': 'admin_token',
    'OS_PROJECT_NAME': 'project_name',
    'OS_AUTH_URL': V2_URL
}
|
||||
|
||||
|
||||
class ShellTest(utils.BaseTestCase):
    """End-to-end tests of the legacy 'ironic' CLI shell entry point."""

    # Regex flags used by all MatchesRegex assertions below.
    re_options = re.DOTALL | re.MULTILINE

    # Patch os.environ to avoid required auth info.
    def make_env(self, exclude=None, environ_dict=FAKE_ENV):
        env = dict((k, v) for k, v in environ_dict.items() if k != exclude)
        self.useFixture(fixtures.MonkeyPatch('os.environ', env))

    def setUp(self):
        super(ShellTest, self).setUp()

    def shell(self, argstr):
        """Run the ironic shell with *argstr*; return (stdout, stderr).

        SystemExit with code 0 (e.g. from --help) is treated as success;
        output is captured even when the shell raises.
        """
        with mock.patch.object(sys, 'stdout', six.StringIO()):
            with mock.patch.object(sys, 'stderr', six.StringIO()):
                try:
                    _shell = ironic_shell.IronicShell()
                    _shell.main(argstr.split())
                except SystemExit:
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    self.assertEqual(0, exc_value.code)
                finally:
                    out = sys.stdout.getvalue()
                    err = sys.stderr.getvalue()
        return out, err

    def test_help_unknown_command(self):
        """'help <bogus>' raises CommandError."""
        self.assertRaises(exc.CommandError, self.shell, 'help foofoo')

    def test_help(self):
        """Both '--help' and 'help' print the usage summary."""
        required = [
            '.*?^usage: ironic',
            '.*?^ +bash-completion',
            '.*?^See "ironic help COMMAND" '
            'for help on a specific command',
        ]
        for argstr in ['--help', 'help']:
            help_text = self.shell(argstr)[0]
            for r in required:
                self.assertThat(help_text,
                                matchers.MatchesRegex(r,
                                                      self.re_options))

    def test_help_on_subcommand(self):
        """'help <subcommand>' prints that command's usage."""
        required = [
            ".*?^usage: ironic chassis-show",
            ".*?^Show detailed information about a chassis",
        ]
        argstrings = [
            'help chassis-show',
        ]
        for argstr in argstrings:
            help_text = self.shell(argstr)[0]
            for r in required:
                self.assertThat(help_text,
                                matchers.MatchesRegex(r, self.re_options))

    def test_required_args_on_node_create_help(self):
        """node-create help advertises its required arguments."""
        required = [
            ".*?^usage: ironic node-create",
            ".*?^Register a new node with the Ironic service",
            ".*?^Required arguments:",
        ]
        argstrings = [
            'help node-create',
        ]
        for argstr in argstrings:
            help_text = self.shell(argstr)[0]
            for r in required:
                self.assertThat(help_text,
                                matchers.MatchesRegex(r, self.re_options))

    def test_required_args_on_port_create_help(self):
        """port-create help advertises its required arguments."""
        required = [
            ".*?^usage: ironic port-create",
            ".*?^Create a new port",
            ".*?^Required arguments:",
        ]
        argstrings = [
            'help port-create',
        ]
        for argstr in argstrings:
            help_text = self.shell(argstr)[0]
            for r in required:
                self.assertThat(help_text,
                                matchers.MatchesRegex(r, self.re_options))

    def test_auth_param(self):
        """help still works with an incomplete auth environment."""
        self.make_env(exclude='OS_USERNAME')
        self.test_help()

    @mock.patch.object(client, 'get_client', autospec=True,
                       side_effect=keystone_exc.ConnectFailure)
    @mock.patch('sys.stdin', side_effect=mock.MagicMock, autospec=True)
    @mock.patch('getpass.getpass', return_value='password', autospec=True)
    def test_password_prompted(self, mock_getpass, mock_stdin, mock_client):
        """Missing OS_PASSWORD triggers an interactive prompt."""
        self.make_env(exclude='OS_PASSWORD')
        # We will get a ConnectFailure because there is no keystone.
        self.assertRaises(keystone_exc.ConnectFailure,
                          self.shell, 'node-list')
        expected_kwargs = {
            'ironic_url': '', 'os_auth_url': FAKE_ENV['OS_AUTH_URL'],
            'os_tenant_id': '', 'os_tenant_name': '',
            'os_username': FAKE_ENV['OS_USERNAME'], 'os_user_domain_id': '',
            'os_user_domain_name': '', 'os_password': FAKE_ENV['OS_PASSWORD'],
            'os_auth_token': '', 'os_project_id': '',
            'os_project_name': FAKE_ENV['OS_PROJECT_NAME'],
            'os_project_domain_id': '',
            'os_project_domain_name': '', 'os_region_name': '',
            'os_service_type': '', 'os_endpoint_type': '', 'os_cacert': None,
            'os_cert': None, 'os_key': None,
            'max_retries': http.DEFAULT_MAX_RETRIES,
            'retry_interval': http.DEFAULT_RETRY_INTERVAL,
            'os_ironic_api_version': None, 'timeout': 600, 'insecure': False
        }
        mock_client.assert_called_once_with(1, **expected_kwargs)
        # Make sure we are actually prompted.
        mock_getpass.assert_called_with('OpenStack Password: ')

    @mock.patch.object(client, 'get_client', autospec=True,
                       side_effect=keystone_exc.ConnectFailure)
    @mock.patch('getpass.getpass', return_value='password', autospec=True)
    def test_token_auth(self, mock_getpass, mock_client):
        """With OS_AUTH_TOKEN set, no password prompt is issued."""
        self.make_env(environ_dict=FAKE_ENV_KEYSTONE_V2_TOKEN)
        # We will get a ConnectFailure because there is no keystone.
        self.assertRaises(keystone_exc.ConnectFailure,
                          self.shell, 'node-list')
        expected_kwargs = {
            'ironic_url': '',
            'os_auth_url': FAKE_ENV_KEYSTONE_V2_TOKEN['OS_AUTH_URL'],
            'os_tenant_id': '',
            'os_tenant_name': '',
            'os_username': '', 'os_user_domain_id': '',
            'os_user_domain_name': '', 'os_password': '',
            'os_auth_token': FAKE_ENV_KEYSTONE_V2_TOKEN['OS_AUTH_TOKEN'],
            'os_project_id': '',
            'os_project_name': FAKE_ENV_KEYSTONE_V2_TOKEN['OS_PROJECT_NAME'],
            'os_project_domain_id': '', 'os_project_domain_name': '',
            'os_region_name': '', 'os_service_type': '',
            'os_endpoint_type': '', 'os_cacert': None, 'os_cert': None,
            'os_key': None, 'max_retries': http.DEFAULT_MAX_RETRIES,
            'retry_interval': http.DEFAULT_RETRY_INTERVAL,
            'os_ironic_api_version': None, 'timeout': 600, 'insecure': False
        }
        mock_client.assert_called_once_with(1, **expected_kwargs)
        self.assertFalse(mock_getpass.called)

    @mock.patch('sys.stdin', side_effect=mock.MagicMock, autospec=True)
    @mock.patch('getpass.getpass', side_effect=EOFError, autospec=True)
    def test_password_prompted_ctrlD(self, mock_getpass, mock_stdin):
        """Ctrl-D (EOF) at the password prompt raises CommandError."""
        self.make_env(exclude='OS_PASSWORD')
        # We should get Command Error because we mock Ctl-D.
        self.assertRaises(exc.CommandError,
                          self.shell, 'node-list')
        # Make sure we are actually prompted.
        mock_getpass.assert_called_with('OpenStack Password: ')

    @mock.patch('sys.stdin', autospec=True)
    def test_no_password_no_tty(self, mock_stdin):
        """Without a tty there is no prompt: a clear error is raised."""
        # delete the isatty attribute so that we do not get
        # prompted when manually running the tests
        del mock_stdin.isatty
        required = ('You must provide a password'
                    ' via either --os-password, env[OS_PASSWORD],'
                    ' or prompted response',)
        self.make_env(exclude='OS_PASSWORD')
        try:
            self.shell('node-list')
        except exc.CommandError as message:
            self.assertEqual(required, message.args)
        else:
            self.fail('CommandError not raised')

    def test_bash_completion(self):
        """bash-completion output mentions known commands and options."""
        stdout = self.shell('bash-completion')[0]
        # just check we have some output
        required = [
            '.*--driver_info',
            '.*--chassis_uuid',
            '.*help',
            '.*node-create',
            '.*chassis-create']
        for r in required:
            self.assertThat(stdout,
                            matchers.MatchesRegex(r, self.re_options))

    def test_ironic_api_version(self):
        """Valid --ironic-api-version values are accepted; bad reject."""
        err = self.shell('--ironic-api-version 1.2 help')[1]
        self.assertFalse(err)

        err = self.shell('--ironic-api-version latest help')[1]
        self.assertFalse(err)

        self.assertRaises(exc.CommandError,
                          self.shell, '--ironic-api-version 1.2.1 help')

    def test_invalid_ironic_api_version(self):
        """A version below the supported range is rejected."""
        self.assertRaises(exceptions.UnsupportedVersion,
                          self.shell, '--ironic-api-version 0.8 help')

    def test_warning_on_no_version(self):
        """Omitting the API version prints a default-version warning."""
        err = self.shell('help')[1]
        self.assertIn('You are using the default API version', err)
|
||||
|
||||
|
||||
class TestCase(testtools.TestCase):
    """Shared helpers: fake client env vars and mocked keystone endpoints."""

    def set_fake_env(self, fake_env):
        """Override every client-related env var for the current test.

        Variables present in ``fake_env`` get that value; the rest are
        unset.  All are restored automatically at test cleanup.
        """
        client_env = ('OS_USERNAME', 'OS_PASSWORD', 'OS_PROJECT_ID',
                      'OS_PROJECT_NAME', 'OS_AUTH_URL', 'OS_REGION_NAME',
                      'OS_AUTH_TOKEN', 'OS_NO_CLIENT_AUTH', 'OS_SERVICE_TYPE',
                      'OS_ENDPOINT_TYPE', 'OS_CACERT', 'OS_CERT', 'OS_KEY')

        for key in client_env:
            self.useFixture(
                fixtures.EnvironmentVariable(key, fake_env.get(key)))

    # required for testing with Python 2.6
    def assertRegexpMatches(self, text, expected_regexp, msg=None):
        """Fail the test unless the text matches the regular expression."""
        if isinstance(expected_regexp, six.string_types):
            expected_regexp = re.compile(expected_regexp)
        if not expected_regexp.search(text):
            msg = msg or "Regexp didn't match"
            msg = '%s: %r not found in %r' % (
                msg, expected_regexp.pattern, text)
            raise self.failureException(msg)

    def register_keystone_v2_token_fixture(self, request_mocker):
        # Mock a v2 token response that exposes a 'baremetal' endpoint.
        v2_token = ks_fixture.V2Token()
        service = v2_token.add_service('baremetal')
        service.add_endpoint('http://ironic.example.com', region='RegionOne')
        request_mocker.post('%s/tokens' % V2_URL,
                            json=v2_token)

    def register_keystone_v3_token_fixture(self, request_mocker):
        # Mock a v3 token response that exposes a 'baremetal' endpoint.
        v3_token = ks_fixture.V3Token()
        service = v3_token.add_service('baremetal')
        service.add_standard_endpoints(public='http://ironic.example.com')
        request_mocker.post(
            '%s/auth/tokens' % V3_URL,
            json=v3_token,
            headers={'X-Subject-Token': uuidutils.generate_uuid()})

    def register_keystone_auth_fixture(self, request_mocker):
        # Register both token fixtures plus the version-discovery
        # documents so either identity API version can be negotiated.
        self.register_keystone_v2_token_fixture(request_mocker)
        self.register_keystone_v3_token_fixture(request_mocker)

        request_mocker.get(V2_URL, json=ks_fixture.V2Discovery(V2_URL))
        request_mocker.get(V3_URL, json=ks_fixture.V3Discovery(V3_URL))
        request_mocker.get(BASE_URL, json=ks_fixture.DiscoveryList(BASE_URL))
|
||||
|
||||
|
||||
class ShellTestNoMox(TestCase):
    """Shell tests that mock HTTP at the requests layer (keystone v2 env)."""

    def setUp(self):
        super(ShellTestNoMox, self).setUp()
        self.set_fake_env(FAKE_ENV_KEYSTONE_V2)

    def shell(self, argstr):
        """Run the ironic shell with *argstr* and return captured stdout.

        A SystemExit with code 0 is tolerated (help commands exit);
        stdout is always restored, even on failure.
        """
        orig = sys.stdout
        try:
            sys.stdout = six.StringIO()
            _shell = ironic_shell.IronicShell()
            _shell.main(argstr.split())
            self.subcommands = _shell.subcommands.keys()
        except SystemExit:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            self.assertEqual(0, exc_value.code)
        finally:
            out = sys.stdout.getvalue()
            sys.stdout.close()
            sys.stdout = orig

        return out

    @requests_mock.mock()
    def test_node_list(self, request_mocker):
        # node-list against a mocked API should print both node UUIDs.
        self.register_keystone_auth_fixture(request_mocker)
        resp_dict = {"nodes": [
            {"instance_uuid": "null",
             "uuid": "351a82d6-9f04-4c36-b79a-a38b9e98ff71",
             "links": [{"href": "http://ironic.example.com:6385/"
                        "v1/nodes/foo",
                        "rel": "self"},
                       {"href": "http://ironic.example.com:6385/"
                        "nodes/foo",
                        "rel": "bookmark"}],
             "maintenance": "false",
             "provision_state": "null",
             "power_state": "power off"},
            {"instance_uuid": "null",
             "uuid": "66fbba13-29e8-4b8a-9e80-c655096a40d3",
             "links": [{"href": "http://ironic.example.com:6385/"
                        "v1/nodes/foo2",
                        "rel": "self"},
                       {"href": "http://ironic.example.com:6385/"
                        "nodes/foo2",
                        "rel": "bookmark"}],
             "maintenance": "false",
             "provision_state": "null",
             "power_state": "power off"}]}
        headers = {'Content-Type': 'application/json; charset=UTF-8'}
        request_mocker.get('http://ironic.example.com/v1/nodes',
                           headers=headers,
                           json=resp_dict)

        event_list_text = self.shell('node-list')

        required = [
            '351a82d6-9f04-4c36-b79a-a38b9e98ff71',
            '66fbba13-29e8-4b8a-9e80-c655096a40d3',
        ]

        for r in required:
            self.assertRegexpMatches(event_list_text, r)
|
||||
|
||||
|
||||
class ShellTestNoMoxV3(ShellTestNoMox):
    """Variant of ShellTestNoMox intended for a keystone v3 environment."""

    # NOTE(review): nothing visible here ever calls _set_fake_env(), and
    # the inherited setUp() still installs FAKE_ENV_KEYSTONE_V2 -- confirm
    # whether this was meant to override setUp() instead.
    def _set_fake_env(self):
        self.set_fake_env(FAKE_ENV_KEYSTONE_V3)
|
||||
|
||||
|
||||
class ShellParserTest(TestCase):
    """Tests for the base argument parser's environment-variable defaults."""

    def test_deprecated_defaults(self):
        # The TLS-related CLI options should default to the legacy OS_*
        # environment variables.
        cert_env = {
            'OS_CACERT': '/fake/cacert.pem',
            'OS_CERT': '/fake/cert.pem',
            'OS_KEY': '/fake/key.pem',
        }
        self.set_fake_env(cert_env)
        parser = ironic_shell.IronicShell().get_base_parser()
        options, _ = parser.parse_known_args([])
        for opt_name, env_key in (('os_cacert', 'OS_CACERT'),
                                  ('os_cert', 'OS_CERT'),
                                  ('os_key', 'OS_KEY')):
            self.assertEqual(cert_env[env_key], getattr(options, opt_name))
|
@ -1,132 +0,0 @@
|
||||
# Copyright 2012 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import os
|
||||
|
||||
import fixtures
|
||||
import mock
|
||||
from oslo_utils import strutils
|
||||
import requests
|
||||
import six
|
||||
import testtools
|
||||
|
||||
|
||||
class BaseTestCase(testtools.TestCase):
    """Base test case: silences logging and optionally captures output."""

    def setUp(self):
        super(BaseTestCase, self).setUp()
        # Swallow log output so it does not pollute test results.
        self.useFixture(fixtures.FakeLogger())

        # If enabled, stdout and/or stderr is captured and will appear in
        # test results if that test fails.
        if strutils.bool_from_string(os.environ.get('OS_STDOUT_CAPTURE')):
            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
        if strutils.bool_from_string(os.environ.get('OS_STDERR_CAPTURE')):
            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
|
||||
|
||||
|
||||
class FakeAPI(object):
    """Canned-response stand-in for the ironic HTTP client.

    ``responses`` maps url -> {method -> (headers, body)}.  Every request
    made through this object is recorded in ``self.calls`` so tests can
    assert on the exact requests issued.
    """

    def __init__(self, responses):
        self.responses = responses
        self.calls = []

    def _request(self, method, url, headers=None, body=None):
        # Record the call, then look up the canned (headers, body) pair.
        call = (method, url, headers or {}, body)
        self.calls.append(call)
        return self.responses[url][method]

    def raw_request(self, *args, **kwargs):
        # Mimic the raw interface: response object plus a body iterator.
        response = self._request(*args, **kwargs)
        body_iter = iter(six.StringIO(response[1]))
        return FakeResponse(response[0]), body_iter

    def json_request(self, *args, **kwargs):
        # Mimic the JSON interface: response object plus decoded body.
        response = self._request(*args, **kwargs)
        return FakeResponse(response[0]), response[1]
|
||||
|
||||
|
||||
class FakeConnection(object):
    """Minimal stand-in for an HTTP connection.

    Nothing is ever sent anywhere: the last request is remembered on
    ``_last_request`` and ``getresponse`` returns a canned value.
    """

    def __init__(self, response=None):
        self._response = response
        self._last_request = None

    def request(self, method, conn_url, **kwargs):
        # Only record the call; there is no network activity.
        self._last_request = (method, conn_url, kwargs)

    def setresponse(self, response):
        self._response = response

    def getresponse(self):
        return self._response

    def __repr__(self):
        return "FakeConnection(response={})".format(self._response)
|
||||
|
||||
|
||||
class FakeResponse(object):
    """Fake HTTP response object for client tests."""

    def __init__(self, headers, body=None, version=None, status=None,
                 reason=None):
        """Fake object to help testing.

        :param headers: dict representing HTTP response headers
        :param body: file-like object
        :param version: HTTP protocol version (stored on ``self.raw``)
        :param status: HTTP status code (exposed as ``status_code``)
        :param reason: HTTP reason phrase
        """
        self.headers = headers
        self.body = body
        self.raw = mock.Mock()
        self.raw.version = version
        self.status_code = status
        self.reason = reason

    def getheaders(self):
        # Deep-copy so callers cannot mutate the stored headers.
        return copy.deepcopy(self.headers).items()

    def getheader(self, key, default):
        return self.headers.get(key, default)

    def read(self, amt):
        return self.body.read(amt)

    def __repr__(self):
        # Bug fix: the original referenced self.version and self.status,
        # neither of which exists (__init__ stores them as
        # self.raw.version and self.status_code), so repr() raised
        # AttributeError.
        return ("FakeResponse(%s, body=%s, version=%s, status=%s, reason=%s)" %
                (self.headers, self.body, self.raw.version, self.status_code,
                 self.reason))
|
||||
|
||||
|
||||
def mockSessionResponse(headers, content=None, status_code=None, version=None):
    """Build a mock requests.Response carrying the given attributes."""
    raw = mock.Mock()
    raw.version = version
    response = mock.Mock(spec=requests.Response,
                         headers=headers,
                         content=content,
                         status_code=status_code,
                         raw=raw,
                         reason='',
                         encoding='UTF-8')
    # Mirror .content on .text, since callers may read either attribute.
    response.text = content

    return response
|
||||
|
||||
|
||||
def mockSession(headers, content=None, status_code=None, version=None):
    """Build a mock requests.Session whose request() returns a canned response."""
    session = mock.Mock(spec=requests.Session,
                        verify=False,
                        cert=('test_cert', 'test_key'))
    response = mockSessionResponse(headers, content, status_code, version)
    session.request = mock.Mock(return_value=response)

    return session
|
@ -1,463 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
|
||||
import testtools
|
||||
from testtools.matchers import HasLength
|
||||
|
||||
from ironicclient import exc
|
||||
from ironicclient.tests.unit import utils
|
||||
import ironicclient.v1.chassis
|
||||
|
||||
# Canned chassis/node resources shared by the fake API responses below.
CHASSIS = {'uuid': 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee',
           'extra': {},
           'description': 'data-center-1-chassis'}

CHASSIS2 = {'uuid': 'eeeeeeee-dddd-cccc-bbbb-aaaaaaaaaaaa',
            'extra': {},
            'description': 'data-center-1-chassis'}


NODE = {'uuid': '66666666-7777-8888-9999-000000000000',
        'chassis_uuid': 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee',
        'provision_state': 'available',
        'driver': 'fake',
        'driver_info': {'user': 'foo', 'password': 'bar'},
        'properties': {'num_cpu': 4},
        'extra': {}}

# Request body for POST /v1/chassis (the server generates the uuid).
CREATE_CHASSIS = copy.deepcopy(CHASSIS)
del CREATE_CHASSIS['uuid']

# Request body for POST /v1/chassis with a caller-supplied uuid.
CREATE_WITH_UUID = copy.deepcopy(CHASSIS)

# Expected result of PATCHing the chassis description.
UPDATED_CHASSIS = copy.deepcopy(CHASSIS)
NEW_DESCR = 'new-description'
UPDATED_CHASSIS['description'] = NEW_DESCR
|
||||
|
||||
# Default canned responses: url -> {HTTP method -> (headers, body)}.
fake_responses = {
    '/v1/chassis':
    {
        'GET': (
            {},
            {"chassis": [CHASSIS]},
        ),
        'POST': (
            {},
            CREATE_CHASSIS,
        ),
    },
    '/v1/chassis/detail':
    {
        'GET': (
            {},
            {"chassis": [CHASSIS]},
        ),
    },
    '/v1/chassis/?fields=uuid,extra':
    {
        'GET': (
            {},
            {"chassis": [CHASSIS]},
        ),
    },
    '/v1/chassis/%s' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            CHASSIS,
        ),
        'DELETE': (
            {},
            None,
        ),
        'PATCH': (
            {},
            UPDATED_CHASSIS,
        ),
    },
    '/v1/chassis/%s?fields=uuid,description' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            CHASSIS,
        ),
    },
    '/v1/chassis/%s/nodes' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            {"nodes": [NODE]},
        ),
    },
    '/v1/chassis/%s/nodes/detail' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            {"nodes": [NODE]},
        ),
    },
    '/v1/chassis/%s/nodes?fields=uuid,extra' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            {"nodes": [NODE]},
        ),
    },
    '/v1/chassis/%s/nodes?associated=True' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            {"nodes": [NODE]},
        )
    },
    '/v1/chassis/%s/nodes?maintenance=False' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            {"nodes": [NODE]},
        )
    },
    '/v1/chassis/%s/nodes?provision_state=available' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            {"nodes": [NODE]},
        )
    },
}
|
||||
|
||||
# Responses exercising limit/marker pagination, including a 'next' link
# that the client is expected to follow when limit=0.
fake_responses_pagination = {
    '/v1/chassis':
    {
        'GET': (
            {},
            {"chassis": [CHASSIS],
             "next": "http://127.0.0.1:6385/v1/chassis/?limit=1"}
        ),
    },
    '/v1/chassis/?limit=1':
    {
        'GET': (
            {},
            {"chassis": [CHASSIS2]}
        ),
    },
    '/v1/chassis/?marker=%s' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            {"chassis": [CHASSIS2]}
        ),
    },
    '/v1/chassis/%s/nodes?limit=1' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            {"nodes": [NODE]},
        ),
    },
    '/v1/chassis/%s/nodes?marker=%s' % (CHASSIS['uuid'], NODE['uuid']):
    {
        'GET': (
            {},
            {"nodes": [NODE]},
        ),
    },
}
|
||||
|
||||
# Responses exercising the sort_key/sort_dir query parameters.
fake_responses_sorting = {
    '/v1/chassis/?sort_key=updated_at':
    {
        'GET': (
            {},
            {"chassis": [CHASSIS2]}
        ),
    },
    '/v1/chassis/?sort_dir=desc':
    {
        'GET': (
            {},
            {"chassis": [CHASSIS2]}
        ),
    },
    '/v1/chassis/%s/nodes?sort_key=updated_at' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            {"nodes": [NODE]},
        ),
    },
    '/v1/chassis/%s/nodes?sort_dir=desc' % CHASSIS['uuid']:
    {
        'GET': (
            {},
            {"nodes": [NODE]},
        ),
    },
}
|
||||
|
||||
|
||||
class ChassisManagerTest(testtools.TestCase):
|
||||
|
||||
    def setUp(self):
        super(ChassisManagerTest, self).setUp()
        # Default fixtures; pagination/sorting tests swap in their own.
        self.api = utils.FakeAPI(fake_responses)
        self.mgr = ironicclient.v1.chassis.ChassisManager(self.api)
|
||||
|
||||
    def test_chassis_list(self):
        # Plain list() issues GET /v1/chassis and returns one chassis.
        chassis = self.mgr.list()
        expect = [
            ('GET', '/v1/chassis', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(1, len(chassis))
|
||||
|
||||
    def test_chassis_list_limit(self):
        # limit=1 is forwarded as a query parameter.
        self.api = utils.FakeAPI(fake_responses_pagination)
        self.mgr = ironicclient.v1.chassis.ChassisManager(self.api)
        chassis = self.mgr.list(limit=1)
        expect = [
            ('GET', '/v1/chassis/?limit=1', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertThat(chassis, HasLength(1))
|
||||
|
||||
    def test_chassis_list_marker(self):
        # marker=<uuid> is forwarded as a query parameter.
        self.api = utils.FakeAPI(fake_responses_pagination)
        self.mgr = ironicclient.v1.chassis.ChassisManager(self.api)
        chassis = self.mgr.list(marker=CHASSIS['uuid'])
        expect = [
            ('GET', '/v1/chassis/?marker=%s' % CHASSIS['uuid'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertThat(chassis, HasLength(1))
|
||||
|
||||
    def test_chassis_list_pagination_no_limit(self):
        # limit=0 means "fetch everything": the client follows the
        # 'next' link and aggregates both pages of results.
        self.api = utils.FakeAPI(fake_responses_pagination)
        self.mgr = ironicclient.v1.chassis.ChassisManager(self.api)
        chassis = self.mgr.list(limit=0)
        expect = [
            ('GET', '/v1/chassis', {}, None),
            ('GET', '/v1/chassis/?limit=1', {}, None)
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertThat(chassis, HasLength(2))
|
||||
|
||||
    def test_chassis_list_sort_key(self):
        # sort_key is forwarded as a query parameter.
        self.api = utils.FakeAPI(fake_responses_sorting)
        self.mgr = ironicclient.v1.chassis.ChassisManager(self.api)
        chassis = self.mgr.list(sort_key='updated_at')
        expect = [
            ('GET', '/v1/chassis/?sort_key=updated_at', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertThat(chassis, HasLength(1))
|
||||
|
||||
    def test_chassis_list_sort_dir(self):
        # sort_dir is forwarded as a query parameter.
        self.api = utils.FakeAPI(fake_responses_sorting)
        self.mgr = ironicclient.v1.chassis.ChassisManager(self.api)
        chassis = self.mgr.list(sort_dir='desc')
        expect = [
            ('GET', '/v1/chassis/?sort_dir=desc', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertThat(chassis, HasLength(1))
|
||||
|
||||
    def test_chassis_list_detail(self):
        # detail=True hits the /detail endpoint.
        chassis = self.mgr.list(detail=True)
        expect = [
            ('GET', '/v1/chassis/detail', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(1, len(chassis))
|
||||
|
||||
    def test_chassis_list_fields(self):
        # A fields list becomes a comma-separated query parameter.
        nodes = self.mgr.list(fields=['uuid', 'extra'])
        expect = [
            ('GET', '/v1/chassis/?fields=uuid,extra', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(1, len(nodes))
|
||||
|
||||
    def test_chassis_list_detail_and_fields_fail(self):
        # detail=True and fields are mutually exclusive.
        self.assertRaises(exc.InvalidAttribute, self.mgr.list,
                          detail=True, fields=['uuid', 'extra'])
|
||||
|
||||
    def test_chassis_show(self):
        # get() by uuid issues GET /v1/chassis/<uuid>.
        chassis = self.mgr.get(CHASSIS['uuid'])
        expect = [
            ('GET', '/v1/chassis/%s' % CHASSIS['uuid'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(CHASSIS['uuid'], chassis.uuid)
        self.assertEqual(CHASSIS['description'], chassis.description)
|
||||
|
||||
    def test_chassis_show_fields(self):
        # get() with a fields list appends the fields query parameter.
        chassis = self.mgr.get(CHASSIS['uuid'], fields=['uuid', 'description'])
        expect = [
            ('GET', '/v1/chassis/%s?fields=uuid,description' %
             CHASSIS['uuid'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(CHASSIS['uuid'], chassis.uuid)
        self.assertEqual(CHASSIS['description'], chassis.description)
|
||||
|
||||
    def test_create(self):
        # create() POSTs the attributes to /v1/chassis.
        chassis = self.mgr.create(**CREATE_CHASSIS)
        expect = [
            ('POST', '/v1/chassis', {}, CREATE_CHASSIS),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertTrue(chassis)
|
||||
|
||||
    def test_create_with_uuid(self):
        # A caller-supplied uuid is passed through in the POST body.
        chassis = self.mgr.create(**CREATE_WITH_UUID)
        expect = [
            ('POST', '/v1/chassis', {}, CREATE_WITH_UUID),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertTrue(chassis)
|
||||
|
||||
    def test_delete(self):
        # delete() issues DELETE /v1/chassis/<uuid> and returns None.
        chassis = self.mgr.delete(chassis_id=CHASSIS['uuid'])
        expect = [
            ('DELETE', '/v1/chassis/%s' % CHASSIS['uuid'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertIsNone(chassis)
|
||||
|
||||
    def test_update(self):
        # update() PATCHes the resource with a JSON-patch document.
        patch = {'op': 'replace',
                 'value': NEW_DESCR,
                 'path': '/description'}
        chassis = self.mgr.update(chassis_id=CHASSIS['uuid'], patch=patch)
        expect = [
            ('PATCH', '/v1/chassis/%s' % CHASSIS['uuid'], {}, patch),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(NEW_DESCR, chassis.description)
|
||||
|
||||
    def test_chassis_node_list(self):
        # list_nodes() issues GET /v1/chassis/<uuid>/nodes.
        nodes = self.mgr.list_nodes(CHASSIS['uuid'])
        expect = [
            ('GET', '/v1/chassis/%s/nodes' % CHASSIS['uuid'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(1, len(nodes))
        self.assertEqual(NODE['uuid'], nodes[0].uuid)
|
||||
|
||||
    def test_chassis_node_list_detail(self):
        # detail=True hits the nodes/detail endpoint.
        nodes = self.mgr.list_nodes(CHASSIS['uuid'], detail=True)
        expect = [
            ('GET', '/v1/chassis/%s/nodes/detail' % CHASSIS['uuid'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(1, len(nodes))
        self.assertEqual(NODE['uuid'], nodes[0].uuid)
|
||||
|
||||
    def test_chassis_node_list_fields(self):
        # A fields list becomes a comma-separated query parameter.
        nodes = self.mgr.list_nodes(CHASSIS['uuid'], fields=['uuid', 'extra'])
        expect = [
            ('GET', '/v1/chassis/%s/nodes?fields=uuid,extra' %
             CHASSIS['uuid'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(1, len(nodes))
|
||||
|
||||
    def test_chassis_node_list_maintenance(self):
        # maintenance=False is forwarded as a query parameter.
        nodes = self.mgr.list_nodes(CHASSIS['uuid'], maintenance=False)
        expect = [
            ('GET', '/v1/chassis/%s/nodes?maintenance=False' %
             CHASSIS['uuid'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(1, len(nodes))
|
||||
|
||||
    def test_chassis_node_list_associated(self):
        # associated=True is forwarded as a query parameter.
        nodes = self.mgr.list_nodes(CHASSIS['uuid'], associated=True)
        expect = [
            ('GET', '/v1/chassis/%s/nodes?associated=True' %
             CHASSIS['uuid'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(1, len(nodes))
|
||||
|
||||
    def test_chassis_node_list_provision_state(self):
        # provision_state is forwarded as a query parameter.
        nodes = self.mgr.list_nodes(CHASSIS['uuid'],
                                    provision_state="available")
        expect = [
            ('GET', '/v1/chassis/%s/nodes?provision_state=available' %
             CHASSIS['uuid'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(1, len(nodes))
|
||||
|
||||
    def test_chassis_node_list_detail_and_fields_fail(self):
        # detail=True and fields are mutually exclusive for list_nodes too.
        self.assertRaises(exc.InvalidAttribute, self.mgr.list_nodes,
                          CHASSIS['uuid'], detail=True,
                          fields=['uuid', 'extra'])
|
||||
|
||||
    def test_chassis_node_list_limit(self):
        # limit=1 is forwarded on the nodes sub-resource URL.
        self.api = utils.FakeAPI(fake_responses_pagination)
        self.mgr = ironicclient.v1.chassis.ChassisManager(self.api)
        nodes = self.mgr.list_nodes(CHASSIS['uuid'], limit=1)
        expect = [
            ('GET',
             '/v1/chassis/%s/nodes?limit=1' % CHASSIS['uuid'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertThat(nodes, HasLength(1))
        self.assertEqual(NODE['uuid'], nodes[0].uuid)
|
||||
|
||||
    def test_chassis_node_list_sort_key(self):
        # sort_key is forwarded on the nodes sub-resource URL.
        self.api = utils.FakeAPI(fake_responses_sorting)
        self.mgr = ironicclient.v1.chassis.ChassisManager(self.api)
        nodes = self.mgr.list_nodes(CHASSIS['uuid'], sort_key='updated_at')
        expect = [
            ('GET',
             '/v1/chassis/%s/nodes?sort_key=updated_at' % CHASSIS['uuid'], {},
             None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertThat(nodes, HasLength(1))
        self.assertEqual(NODE['uuid'], nodes[0].uuid)
|
||||
|
||||
    def test_chassis_node_list_sort_dir(self):
        # sort_dir is forwarded on the nodes sub-resource URL.
        self.api = utils.FakeAPI(fake_responses_sorting)
        self.mgr = ironicclient.v1.chassis.ChassisManager(self.api)
        nodes = self.mgr.list_nodes(CHASSIS['uuid'], sort_dir='desc')
        expect = [
            ('GET',
             '/v1/chassis/%s/nodes?sort_dir=desc' % CHASSIS['uuid'], {},
             None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertThat(nodes, HasLength(1))
        self.assertEqual(NODE['uuid'], nodes[0].uuid)
|
||||
|
||||
    def test_chassis_node_list_marker(self):
        # marker=<node uuid> is forwarded on the nodes sub-resource URL.
        self.api = utils.FakeAPI(fake_responses_pagination)
        self.mgr = ironicclient.v1.chassis.ChassisManager(self.api)
        nodes = self.mgr.list_nodes(CHASSIS['uuid'], marker=NODE['uuid'])
        expect = [
            ('GET',
             '/v1/chassis/%s/nodes?marker=%s' % (CHASSIS['uuid'],
                                                 NODE['uuid']), {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertThat(nodes, HasLength(1))
        self.assertEqual(NODE['uuid'], nodes[0].uuid)
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user