Retire fairy-slipper API doc tools project
Remove all project contents. Depends-On: I1eae2cc06695a7b363f9bf78224269ebdfce9ae7 Needed-By: Ib3f53f6758c947aa98a93b3ed9e229137ada1719 Change-Id: Ie9ef42f56ecfcb918550165b9afa6e60c7601da3
This commit is contained in:
parent
f0b28704a5
commit
cd8f1e82b4
|
@ -1,4 +0,0 @@
|
|||
[run]
|
||||
branch = True
|
||||
source = fairy_slipper
|
||||
omit = fairy_slipper/openstack/*
|
|
@ -1,65 +0,0 @@
|
|||
*.py[cod]
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Packages
|
||||
*.egg
|
||||
*.egg-info
|
||||
dist
|
||||
build
|
||||
eggs
|
||||
parts
|
||||
bin
|
||||
var
|
||||
sdist
|
||||
develop-eggs
|
||||
.installed.cfg
|
||||
lib
|
||||
lib64
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
.coverage
|
||||
.tox
|
||||
nosetests.xml
|
||||
.testrepository
|
||||
.venv
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
|
||||
# Mr Developer
|
||||
.mr.developer.cfg
|
||||
.project
|
||||
.pydevproject
|
||||
|
||||
# Complexity
|
||||
output/*.html
|
||||
output/*/index.html
|
||||
|
||||
# Sphinx
|
||||
doc/build
|
||||
releasenotes/build
|
||||
|
||||
# pbr generates these
|
||||
AUTHORS
|
||||
ChangeLog
|
||||
|
||||
# Editors
|
||||
*~
|
||||
.*.swp
|
||||
.*sw?
|
||||
|
||||
node_modules
|
||||
.sass-cache/
|
||||
bower_components
|
||||
.tmp/
|
||||
api-site/
|
||||
conversion_files/
|
||||
conversion_files_valid/
|
||||
api_doc/
|
||||
/public/components/
|
||||
/.eggs/
|
|
@ -1,4 +0,0 @@
|
|||
[gerrit]
|
||||
host=review.openstack.org
|
||||
port=29418
|
||||
project=openstack/fairy-slipper.git
|
3
.mailmap
3
.mailmap
|
@ -1,3 +0,0 @@
|
|||
# Format is:
|
||||
# <preferred e-mail> <other e-mail 1>
|
||||
# <preferred e-mail> <other e-mail 2>
|
|
@ -1,7 +0,0 @@
|
|||
[DEFAULT]
|
||||
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
|
||||
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
|
||||
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
|
||||
${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION
|
||||
test_id_option=--load-list $IDFILE
|
||||
test_list_option=--list
|
26
.travis.yml
26
.travis.yml
|
@ -1,26 +0,0 @@
|
|||
language: python
|
||||
install:
|
||||
- pip install tox
|
||||
script:
|
||||
- tox
|
||||
env:
|
||||
- TOXENV=pep8
|
||||
- TOXENV=pypy
|
||||
- TOXENV=py27
|
||||
- TOXENV=py33
|
||||
- TOXENV=py34
|
||||
- TOXENV=node
|
||||
- TOXENV=coveralls
|
||||
|
||||
before_install:
|
||||
- "[[ \"$TOXENV\" == \"node\" ]] && /sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -ac -screen 0 1280x1024x16 || true"
|
||||
- "[[ \"$TOXENV\" == \"node\" ]] && sudo apt-get install chromium || true"
|
||||
|
||||
before_script:
|
||||
- "[[ \"$TOXENV\" == \"node\" ]] && export CHROME_BIN=chromium || true"
|
||||
- "[[ \"$TOXENV\" == \"node\" ]] && export DISPLAY=:99.0 || true"
|
||||
- "[[ \"$TOXENV\" == \"node\" ]] && sh -e /etc/init.d/xvfb start || true"
|
||||
- "[[ \"$TOXENV\" == \"node\" ]] && sleep 3 || true"
|
||||
|
||||
after_success:
|
||||
- "[[ \"$TOXENV\" == \"coveralls\" ]] && ./.tox/coveralls/bin/coveralls || true"
|
|
@ -1,17 +0,0 @@
|
|||
If you would like to contribute to the development of OpenStack, you must
|
||||
follow the steps in this page:
|
||||
|
||||
http://docs.openstack.org/infra/manual/developers.html
|
||||
|
||||
If you already have a good understanding of how the system works and your
|
||||
OpenStack accounts are set up, you can skip to the development workflow
|
||||
section of this documentation to learn how changes to OpenStack should be
|
||||
submitted for review via the Gerrit tool:
|
||||
|
||||
http://docs.openstack.org/infra/manual/developers.html#development-workflow
|
||||
|
||||
Pull requests submitted through GitHub will be ignored.
|
||||
|
||||
Bugs should be filed on Launchpad, not GitHub:
|
||||
|
||||
https://bugs.launchpad.net/openstack-doc-tools
|
41
Gruntfile.js
41
Gruntfile.js
|
@ -1,41 +0,0 @@
|
|||
|
||||
module.exports = function(grunt) {
|
||||
require('matchdep').filterDev('grunt-*').forEach(grunt.loadNpmTasks);
|
||||
|
||||
var proxySnippet =
|
||||
require('grunt-connect-proxy/lib/utils').proxyRequest;
|
||||
|
||||
grunt.initConfig({
|
||||
pkg: grunt.file.readJSON('package.json'),
|
||||
// The actual grunt server settings
|
||||
// Watches files for changes and runs tasks based on the changed files
|
||||
watch: {
|
||||
all: {
|
||||
options: {
|
||||
nospawn: true
|
||||
},
|
||||
files: [
|
||||
'public/**/*.html',
|
||||
'public/**/*.css',
|
||||
'public/**/*.js',
|
||||
'public/**/*.{png,jpg,jpeg,gif,webp,svg}'
|
||||
],
|
||||
tasks: ["default"]
|
||||
}
|
||||
},
|
||||
reload: {
|
||||
port: 9000,
|
||||
liveReload: {},
|
||||
proxy: {
|
||||
host: "localhost",
|
||||
port: 8080
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
grunt.registerTask('default', [
|
||||
'reload',
|
||||
'watch'
|
||||
]);
|
||||
|
||||
};
|
|
@ -1,4 +0,0 @@
|
|||
fairy-slipper Style Commandments
|
||||
===============================================
|
||||
|
||||
Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/
|
176
LICENSE
176
LICENSE
|
@ -1,176 +0,0 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
include AUTHORS
|
||||
include ChangeLog
|
||||
recursive-include public *
|
||||
exclude .gitignore
|
||||
exclude .gitreview
|
||||
|
||||
global-exclude *.pyc
|
92
README.rst
92
README.rst
|
@ -2,92 +2,10 @@
|
|||
fairy-slipper
|
||||
=============
|
||||
|
||||
A project to make OpenStack API's self documententing.
|
||||
This project is no longer maintained.
|
||||
|
||||
* Free software: Apache license
|
||||
* Documentation: doc/source directory
|
||||
* Source: https://git.openstack.org/cgit/openstack/fairy-slipper
|
||||
* Bugs: https://bugs.launchpad.net/openstack-doc-tools
|
||||
The contents of this repository are still available in the Git
|
||||
source code management system. To see the contents of this
|
||||
repository before it reached its end of life, please check out the
|
||||
previous commit with "git checkout HEAD^1".
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
* Migrates WADL source to Swagger files
|
||||
* Provides display of RST plus Swagger JSON files in a web browser
|
||||
|
||||
Development
|
||||
-----------
|
||||
|
||||
First run the migrate script to initially migrate the content from wadl::
|
||||
|
||||
./migrate.sh
|
||||
|
||||
This script will checkout the current version of the documentation.
|
||||
|
||||
To run the webserver use::
|
||||
|
||||
./run_server.sh
|
||||
|
||||
A Pecan based webserver will then listen on http://127.0.0.1:8080
|
||||
|
||||
AngularJS
|
||||
~~~~~~~~~
|
||||
|
||||
To develop the AngularJS component, it's easiest if you use the grunt webserver::
|
||||
|
||||
grunt
|
||||
|
||||
You will still need to run the Fairy-Slipper webserver, but this will
|
||||
enable auto reloading if you visit the port http://127.0.0.1:9000
|
||||
|
||||
Directory Structure (Future)
|
||||
----------------------------
|
||||
|
||||
Current documentation output layout::
|
||||
|
||||
api-doc/ -- the root of the documentation
|
||||
api-doc/index.json -- the index file that lists all the files that are included in the API doc.
|
||||
api-doc/<service>/<version>.rst
|
||||
api-doc/<service>/<version>/<request_schema>.json
|
||||
api-doc/<service>/<version>/<response_schema>_<status_code>.json
|
||||
api-doc/<service>/<version>/examples/<request>_req.json
|
||||
api-doc/<service>/<version>/examples/<response>_resp_<status_code>.json
|
||||
conversion_files_valid/<service-version>.json -- valid Swagger files
|
||||
|
||||
Other Swagger UIs
|
||||
-----------------
|
||||
|
||||
Taken from https://github.com/swagger-api/swagger-spec/wiki/Sites-and-Services
|
||||
|
||||
- http://docs.apimatic.apiary.io/
|
||||
- http://docs.api2cart.com/post/interactive-docs
|
||||
- http://chat.banckle.com/api/v3.0/
|
||||
- http://www.evercam.io/develop/docs
|
||||
- https://api.elastic.io/docs/
|
||||
- https://developer.concur.com/
|
||||
- https://www.callfire.com/api-documentation/rest/version/1.1
|
||||
- https://www.bitmex.com/api/explorer/
|
||||
- https://bitdango.com/api
|
||||
- https://api.groupdocs.com/v2.0/spec/
|
||||
- http://developer.wordnik.com/docs.html
|
||||
- https://api.sensr.net/doc/v3/index.html
|
||||
|
||||
Other API documentation Tools
|
||||
-----------------------------
|
||||
|
||||
- https://github.com/danielgtaylor/aglio
|
||||
|
||||
Alternative Clients
|
||||
-------------------
|
||||
|
||||
- https://github.com/Orange-OpenSource/angular-swagger-ui
|
||||
- https://github.com/apigee-127/swagger-client-API
|
||||
- https://github.com/signalfx/swagger-ajax-client/
|
||||
- https://github.com/signalfx/swagger-angular-client
|
||||
- https://github.com/signalfx/swagger-client-generator
|
||||
|
||||
|
||||
Other Useful Tools
|
||||
------------------
|
||||
|
||||
- http://jsonschema.net/
|
||||
|
|
|
@ -1,209 +0,0 @@
|
|||
# variables in header
|
||||
{}
|
||||
|
||||
# variables in path
|
||||
driver_name:
|
||||
description: |
|
||||
The name of the driver.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
node_id:
|
||||
description: |
|
||||
The UUID of the node.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
port_id:
|
||||
description: |
|
||||
The UUID of the port.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
fields:
|
||||
description: |
|
||||
One or more chassis fields to be returned in the response.
|
||||
|
||||
For example, the following request returns only the ``description``
|
||||
field for each chassis:
|
||||
|
||||
::
|
||||
|
||||
GET /v1/chassis?fields=description
|
||||
in: query
|
||||
required: false
|
||||
type: array
|
||||
limit:
|
||||
description: |
|
||||
Requests a page size of items. Returns a number
|
||||
of items up to a limit value. Use the ``limit`` parameter to make
|
||||
an initial limited request and use the ID of the last-seen item
|
||||
from the response as the ``marker`` parameter value in a
|
||||
subsequent limited request.
|
||||
in: query
|
||||
required: false
|
||||
type: integer
|
||||
marker:
|
||||
description: |
|
||||
The ID of the last-seen item. Use the ``limit``
|
||||
parameter to make an initial limited request and use the ID of the
|
||||
last-seen item from the response as the ``marker`` parameter value
|
||||
in a subsequent limited request.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
sort_dir:
|
||||
description: |
|
||||
Sorts the response by the requested sort
|
||||
direction. A valid value is ``asc`` (ascending) or ``desc``
|
||||
(descending). Default is ``asc``. You can specify multiple pairs
|
||||
of sort key and sort direction query parameters. If you omit the
|
||||
sort direction in a pair, the API uses the natural sorting
|
||||
direction of the server attribute that is provided as the
|
||||
``sort_key``.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
sort_key:
|
||||
description: |
|
||||
Sorts the response by the this attribute value.
|
||||
Default is ``id``. You can specify multiple pairs of sort key and
|
||||
sort direction query parameters. If you omit the sort direction in
|
||||
a pair, the API uses the natural sorting direction of the server
|
||||
attribute that is provided as the ``sort_key``.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in body
|
||||
chassis:
|
||||
description: |
|
||||
A ``chassis`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
chassis_1:
|
||||
description: |
|
||||
A list of chassis objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
created_at:
|
||||
description: |
|
||||
The date and time when the resource was created.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description:
|
||||
description: |
|
||||
A description for the chassis.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
description_1:
|
||||
description: |
|
||||
A description for the chassis.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
drivers:
|
||||
description: |
|
||||
A list of ``drivers`` objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
extra:
|
||||
description: |
|
||||
A set of one or more arbitrary metadata key and
|
||||
value pairs for the chassis.
|
||||
in: body
|
||||
required: false
|
||||
type: object
|
||||
extra_1:
|
||||
description: |
|
||||
A set of zero or more arbitrary metadata key and
|
||||
value pairs for the chassis.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
hosts:
|
||||
description: |
|
||||
A list of active hosts that support this driver.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
links:
|
||||
description: |
|
||||
A list that contains a self link and associated
|
||||
chassis links.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
links_1:
|
||||
description: |
|
||||
A list of relative links. Includes the self and
|
||||
bookmark links.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
name:
|
||||
description: |
|
||||
The name of the driver.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
nodes:
|
||||
description: |
|
||||
Links to the collection of nodes contained in
|
||||
this chassis.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
properties:
|
||||
description: |
|
||||
A list of links to driver properties.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
updated_at:
|
||||
description: |
|
||||
The date and time when the resource was updated.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC. In the previous example, the offset value is ``-05:00``.
|
||||
|
||||
If the ``updated_at`` date and time stamp is not set, its value is
|
||||
``null``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
uuid:
|
||||
description: |
|
||||
The UUID for the chassis.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
|
|
@ -1,638 +0,0 @@
|
|||
# variables in header
|
||||
{}
|
||||
|
||||
# variables in path
|
||||
snapshot_id_1:
|
||||
description: |
|
||||
The UUID of the snapshot.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
tenant_id:
|
||||
description: |
|
||||
The UUID of the tenant in a multi-tenancy cloud.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
user_id:
|
||||
description: |
|
||||
The user ID. Specify in the URI as
|
||||
``user_id={user_id}``.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
volume_id:
|
||||
description: |
|
||||
The UUID of the volume.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
volume_type_id:
|
||||
description: |
|
||||
The UUID for an existing volume type.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
usage:
|
||||
description: |
|
||||
Set to ``usage=true`` to show quota usage.
|
||||
Default is ``false``.
|
||||
in: query
|
||||
required: false
|
||||
type: boolean
|
||||
|
||||
# variables in body
|
||||
attachments:
|
||||
description: |
|
||||
Instance attachment information. If this volume
|
||||
is attached to a server instance, the attachments list includes
|
||||
the UUID of the attached server, an attachment UUID, the name of
|
||||
the attached host, if any, the volume UUID, the device, and the
|
||||
device UUID. Otherwise, this list is empty.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
availability_zone:
|
||||
description: |
|
||||
The availability zone.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
availability_zone_1:
|
||||
description: |
|
||||
The availability zone.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
bootable:
|
||||
description: |
|
||||
Enables or disables the bootable attribute. You
|
||||
can boot an instance from a bootable volume.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
consistencygroup_id:
|
||||
description: |
|
||||
The UUID of the consistency group.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
consistencygroup_id_1:
|
||||
description: |
|
||||
The UUID of the consistency group.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
cores:
|
||||
description: |
|
||||
The number of instance cores that are allowed for
|
||||
each tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
cores_1:
|
||||
description: |
|
||||
A ``cores`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
cores_2:
|
||||
description: |
|
||||
The number of instance cores that are allowed for
|
||||
each tenant.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
created_at:
|
||||
description: |
|
||||
The date and time when the resource was created.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description:
|
||||
description: |
|
||||
The volume description.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
description_1:
|
||||
description: |
|
||||
The volume description.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
encrypted:
|
||||
description: |
|
||||
If true, this volume is encrypted.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
extra_specs:
|
||||
description: |
|
||||
A set of key and value pairs that contains the
|
||||
specifications for a volume type.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
fixed_ips:
|
||||
description: |
|
||||
The number of fixed IP addresses that are allowed
|
||||
for each tenant. Must be equal to or greater than the number of
|
||||
allowed instances.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
fixed_ips_1:
|
||||
description: |
|
||||
A ``fixed_ips`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
fixed_ips_2:
|
||||
description: |
|
||||
The number of fixed IP addresses that are allowed
|
||||
for each tenant. Must be equal to or greater than the number of
|
||||
allowed instances.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
floating_ips:
|
||||
description: |
|
||||
The number of floating IP addresses that are
|
||||
allowed for each tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
floating_ips_1:
|
||||
description: |
|
||||
A ``floating_ips`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
floating_ips_2:
|
||||
description: |
|
||||
The number of floating IP addresses that are
|
||||
allowed for each tenant.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
id:
|
||||
description: |
|
||||
The UUID of the volume.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_1:
|
||||
description: |
|
||||
The ID for the quota set.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
id_2:
|
||||
description: |
|
||||
The ID for the quota set.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_3:
|
||||
description: |
|
||||
The ID for the quota set.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
imageRef:
|
||||
description: |
|
||||
The UUID of the image from which you want to
|
||||
create the volume. Required to create a bootable volume.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
in_use:
|
||||
description: |
|
||||
The in use data size. Visible only if you set the
|
||||
``usage=true`` query parameter.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
in_use_1:
|
||||
description: |
|
||||
The number of items in use.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
injected_file_content_bytes:
|
||||
description: |
|
||||
The number of bytes of content that are allowed
|
||||
for each injected file.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
injected_file_content_bytes_1:
|
||||
description: |
|
||||
An ``injected_file_content_bytes`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
injected_file_content_bytes_2:
|
||||
description: |
|
||||
The number of bytes of content that are allowed
|
||||
for each injected file.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
injected_file_path_bytes:
|
||||
description: |
|
||||
The number of bytes that are allowed for each
|
||||
injected file path.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
injected_file_path_bytes_1:
|
||||
description: |
|
||||
An ``injected_file_path_bytes`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
injected_file_path_bytes_2:
|
||||
description: |
|
||||
The number of bytes that are allowed for each
|
||||
injected file path.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
injected_files:
|
||||
description: |
|
||||
The number of injected files that are allowed for
|
||||
each tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
injected_files_1:
|
||||
description: |
|
||||
An ``injected_files`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
injected_files_2:
|
||||
description: |
|
||||
The number of injected files that are allowed for
|
||||
each tenant.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
instances:
|
||||
description: |
|
||||
The number of instances that are allowed for each
|
||||
tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
instances_1:
|
||||
description: |
|
||||
An ``instances`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
instances_2:
|
||||
description: |
|
||||
The number of instances that are allowed for each
|
||||
tenant.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
key_pairs:
|
||||
description: |
|
||||
The number of key pairs that are allowed for each
|
||||
user.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
key_pairs_1:
|
||||
description: |
|
||||
A ``key_pairs`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
key_pairs_2:
|
||||
description: |
|
||||
The number of key pairs that are allowed for each
|
||||
user.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
limit:
|
||||
description: |
|
||||
The number of items permitted for this tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
links:
|
||||
description: |
|
||||
The volume links.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
metadata:
|
||||
description: |
|
||||
One or more metadata key and value pairs that are
|
||||
associated with the volume.
|
||||
in: body
|
||||
required: false
|
||||
type: object
|
||||
metadata_1:
|
||||
description: |
|
||||
One or more metadata key and value pairs that are
|
||||
associated with the volume.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
metadata_2:
|
||||
description: |
|
||||
One or more metadata key and value pairs for the
|
||||
snapshot.
|
||||
in: body
|
||||
required: false
|
||||
type: object
|
||||
metadata_items:
|
||||
description: |
|
||||
The number of metadata items that are allowed for
|
||||
each instance.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
metadata_items_1:
|
||||
description: |
|
||||
A ``metadata_items`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
metadata_items_2:
|
||||
description: |
|
||||
The number of metadata items that are allowed for
|
||||
each instance.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
migration_status:
|
||||
description: |
|
||||
The volume migration status.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
multiattach:
|
||||
description: |
|
||||
To enable this volume to attach to more than one
|
||||
server, set this value to ``true``. Default is ``false``.
|
||||
in: body
|
||||
required: false
|
||||
type: boolean
|
||||
multiattach_1:
|
||||
description: |
|
||||
If true, this volume can attach to more than one
|
||||
instance.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
name:
|
||||
description: |
|
||||
The name of the volume type.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_1:
|
||||
description: |
|
||||
The volume name.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
name_2:
|
||||
description: |
|
||||
The volume name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
quota_set:
|
||||
description: |
|
||||
A ``quota_set`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
quota_set_1:
|
||||
description: |
|
||||
A ``quota_set`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
ram:
|
||||
description: |
|
||||
The amount of instance RAM in megabytes that are
|
||||
allowed for each tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
ram_1:
|
||||
description: |
|
||||
A ``ram`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
ram_2:
|
||||
description: |
|
||||
The amount of instance RAM in megabytes that are
|
||||
allowed for each tenant.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
replication_status:
|
||||
description: |
|
||||
The volume replication status.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
reserved:
|
||||
description: |
|
||||
Reserved volume size. Visible only if you set the
|
||||
``usage=true`` query parameter.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
reserved_1:
|
||||
description: |
|
||||
The number of reserved items.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
scheduler_hints:
|
||||
description: |
|
||||
The dictionary of data to send to the scheduler.
|
||||
in: body
|
||||
required: false
|
||||
type: object
|
||||
security_group_rules:
|
||||
description: |
|
||||
The number of rules that are allowed for each
|
||||
security group.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
security_group_rules_1:
|
||||
description: |
|
||||
A ``security_group_rules`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
security_groups:
|
||||
description: |
|
||||
The number of security groups that are allowed
|
||||
for each tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
security_groups_1:
|
||||
description: |
|
||||
A ``security_groups`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
security_groups_2:
|
||||
description: |
|
||||
The number of security groups that are allowed
|
||||
for each tenant.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
size:
|
||||
description: |
|
||||
The size of the volume, in gibibytes (GiB).
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
snapshot:
|
||||
description: |
|
||||
A ``snapshot`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
snapshot_id:
|
||||
description: |
|
||||
To create a volume from an existing snapshot,
|
||||
specify the UUID of the volume snapshot. The volume is created in
|
||||
same availability zone and with same size as the snapshot.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
snapshot_id_2:
|
||||
description: |
|
||||
The UUID of the source volume snapshot. The API
|
||||
creates a new volume snapshot with the same size as the source
|
||||
volume snapshot.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
source_replica:
|
||||
description: |
|
||||
The UUID of the primary volume to clone.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
source_volid:
|
||||
description: |
|
||||
The UUID of the source volume. The API creates a
|
||||
new volume with the same size as the source volume.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
source_volid_1:
|
||||
description: |
|
||||
The UUID of the source volume.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
status:
|
||||
description: |
|
||||
The volume status.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
updated_at:
|
||||
description: |
|
||||
The date and time when the resource was updated.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC. In the previous example, the offset value is ``-05:00``.
|
||||
|
||||
If the ``updated_at`` date and time stamp is not set, its value is
|
||||
``null``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
user_id_1:
|
||||
description: |
|
||||
The UUID of the user.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
volume:
|
||||
description: |
|
||||
A ``volume`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
volume_type:
|
||||
description: |
|
||||
The volume type. To create an environment with
|
||||
multiple-storage back ends, you must specify a volume type. Block
|
||||
Storage volume back ends are spawned as children to ``cinder-
|
||||
volume``, and they are keyed from a unique queue. They are named
|
||||
``cinder- volume.HOST.BACKEND``. For example, ``cinder-
|
||||
volume.ubuntu.lvmdriver``. When a volume is created, the scheduler
|
||||
chooses an appropriate back end to handle the request based on the
|
||||
volume type. Default is ``None``. For information about how to
|
||||
use volume types to create multiple- storage back ends, see
|
||||
`Configure multiple-storage back ends
|
||||
<http://docs.openstack.org/admin-
|
||||
guide/blockstorage_multi_backend.html>`_.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
volume_type_1:
|
||||
description: |
|
||||
The volume type. In an environment with multiple-
|
||||
storage back ends, the scheduler determines where to send the
|
||||
volume based on the volume type. For information about how to use
|
||||
volume types to create multiple- storage back ends, see `Configure
|
||||
multiple-storage back ends <http://docs.openstack.org/admin-
|
||||
guide/blockstorage_multi_backend.html>`_.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
volumes:
|
||||
description: |
|
||||
A list of ``volume`` objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
|
File diff suppressed because it is too large
Load Diff
|
@ -1,616 +0,0 @@
|
|||
# variables in header
|
||||
location:
|
||||
description: |
|
||||
The URL against which to check the status of the
|
||||
action spawned from the request.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
location_1:
|
||||
description: |
|
||||
The URL against which to check the status of the
|
||||
resource.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
|
||||
# variables in path
|
||||
action_id:
|
||||
description: |
|
||||
The UUID of the action.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
cluster_id:
|
||||
description: |
|
||||
The UUID of the cluster.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
event_id:
|
||||
description: |
|
||||
The UUID of the event.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
node_id:
|
||||
description: |
|
||||
The UUID of the node.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
policy_id:
|
||||
description: |
|
||||
The UUID of the policy.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
policy_type:
|
||||
description: |
|
||||
The name of the policy type.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
profile_id_2:
|
||||
description: |
|
||||
The UUID of the profile.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
profile_type:
|
||||
description: |
|
||||
The name of the profile type.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
receiver_id:
|
||||
description: |
|
||||
The UUID of the receiver.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
webhook_id:
|
||||
description: |
|
||||
The UUID of the webhook.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
V:
|
||||
description: |
|
||||
The webhook implementation version requested.
|
||||
in: query
|
||||
required: true
|
||||
type: string
|
||||
action_1:
|
||||
description: |
|
||||
Filters the response by an action name. Use this
|
||||
filter multiple times to filter by multiple names.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
action_2:
|
||||
description: |
|
||||
Filters the response by the action name
|
||||
associated with an event. Use this filter multiple times to filter
|
||||
by multiple actions.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
action_3:
|
||||
description: |
|
||||
Filters the response by the targeted action of a
|
||||
receiver.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
cluster_id_2:
|
||||
description: |
|
||||
Filters the response by the cluster ID associated
|
||||
with an event. Use this filter multiple times to filter by
|
||||
multiple clusters.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
cluster_id_3:
|
||||
description: |
|
||||
Filters the response by the cluster that owns a
|
||||
node.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
cluster_id_4:
|
||||
description: |
|
||||
Filters the response by the ID of the targeted
|
||||
cluster of a receiver.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
enabled:
|
||||
description: |
|
||||
Filters the response by a policy enabled status
|
||||
on the cluster.
|
||||
in: query
|
||||
required: false
|
||||
type: boolean
|
||||
global_project:
|
||||
description: |
|
||||
Indicates whether to include objects for all
|
||||
projects or objects for the current project in the response. If
|
||||
you are an administrative user and you set this value to ``true``,
|
||||
the call returns all objects from all projects. Default is
|
||||
``false``, which returns only objects in the current project.
|
||||
in: query
|
||||
required: false
|
||||
type: boolean
|
||||
limit:
|
||||
description: |
|
||||
Requests a page size of items. Returns a number
|
||||
of items up to a limit value. Use the ``limit`` parameter to make
|
||||
an initial limited request and use the ID of the last-seen item
|
||||
from the response as the ``marker`` parameter value in a
|
||||
subsequent limited request.
|
||||
in: query
|
||||
required: false
|
||||
type: integer
|
||||
marker:
|
||||
description: |
|
||||
The ID of the last-seen item. Use the ``limit``
|
||||
parameter to make an initial limited request and use the ID of the
|
||||
last-seen item from the response as the ``marker`` parameter value
|
||||
in a subsequent limited request.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
metadata_3:
|
||||
description: |
|
||||
Filters the response by a metadata key and value
|
||||
pair.
|
||||
in: query
|
||||
required: false
|
||||
type: object
|
||||
name_10:
|
||||
description: |
|
||||
Filters the response by the name of a receiver.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
name_4:
|
||||
description: |
|
||||
Filters the response by an action name. Use this
|
||||
filter multiple times to filter by multiple names.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
name_5:
|
||||
description: |
|
||||
Filters the response by a cluster name. Use this
|
||||
filter multiple times to filter by multiple names.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
name_6:
|
||||
description: |
|
||||
Filters the response by the name of a node.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
name_7:
|
||||
description: |
|
||||
Filters the response by the name of a policy.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
name_9:
|
||||
description: |
|
||||
Filters the response by the name of a profile.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
obj_id:
|
||||
description: |
|
||||
Filters the response by the object ID for an
|
||||
event. Use this filter multiple times to filter by multiple
|
||||
objects.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
obj_name:
|
||||
description: |
|
||||
Filters the response by the name of object
|
||||
associated with an event. Use this filter multiple times to filter
|
||||
by multiple objects.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
obj_type:
|
||||
description: |
|
||||
Filters the response by the type of object
|
||||
associated with an event. Use this filter multiple times to filter
|
||||
by multiple objects. A valid value is ``CLUSTER`` or ``NODE``.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
params:
|
||||
description: |
|
||||
The query string that forms the inputs to use for
|
||||
the targeted action.
|
||||
in: query
|
||||
required: false
|
||||
type: object
|
||||
show_details:
|
||||
description: |
|
||||
Indicates whether the node details are returned.
|
||||
Default is ``false``.
|
||||
in: query
|
||||
required: false
|
||||
type: boolean
|
||||
sort:
|
||||
description: |
|
||||
Sorts the response by one or more attribute and optional sort
|
||||
direction combinations. A valid direction is ``asc`` (ascending) or
|
||||
``desc`` (descending). Default direction is ``asc`` (ascending).
|
||||
|
||||
Specify the list as < key > [: < direction > ].
|
||||
|
||||
For example, the following query parameters in the URI sort the
|
||||
objects in the response by ``name`` in ascending order and then by
|
||||
``status`` in descending order:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
GET /v2/images?sort=name:asc,status:desc
|
||||
|
||||
The following query parameters in the URI sort the objects in the
|
||||
response by ``name`` in descending order and then by ``status`` in
|
||||
ascending order.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
GET /v2/images?sort=name:desc,status
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
status:
|
||||
description: |
|
||||
Filters the response by a cluster status. Use
|
||||
this filter multiple times to filter by multiple statuses.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
status_1:
|
||||
description: |
|
||||
Filters the response by the status of a node.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
target:
|
||||
description: |
|
||||
Filters the response by the targeted object ID
|
||||
that is associated with an action. An object can be a cluster, a
|
||||
node, and so on. Use this filter multiple times to filter by
|
||||
multiple targets.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
type:
|
||||
description: |
|
||||
Filters the response by the type of a policy.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
type_1:
|
||||
description: |
|
||||
Filters the response by the type of a profile.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
type_2:
|
||||
description: |
|
||||
Filters the response by the type of a receiver.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
user:
|
||||
description: |
|
||||
Filters the response by the user name of a
|
||||
receiver.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in body
|
||||
action:
|
||||
description: |
|
||||
The action to trigger. Each action takes a
|
||||
different set of parameters. Supported actions include: -
|
||||
``add_nodes`` Add one or more nodes, as a list, to a cluster. For
|
||||
example: - ``del_nodes`` Delete one or more nodes, as a list,
|
||||
from a cluster. For example: - ``scale_out`` Enlarge the
|
||||
cluster by ``count`` number of nodes. For example: -
|
||||
``scale_in`` Shrink the cluster by ``count`` number of nodes. For
|
||||
example: - ``resize`` Change the size of the cluster by
|
||||
``adjustment_type``, ``number``, ``min_step``, ``min_size``,
|
||||
``max_size``, or ``strict`` values. For example: - ``check``
|
||||
Check the health status of a cluster. For example: - ``recover``
|
||||
Recover a cluster from its current unhealthy status. For
|
||||
example: - ``policy_attach`` Attach a policy to a cluster. The
|
||||
request body contains parameters for the policy attachment: -
|
||||
``policy_detach`` Detach a policy from a cluster. The request body
|
||||
contains the ID of the policy: - ``policy_update`` Update the
|
||||
policy attachment. Specify the policy ID and property settings
|
||||
in the request body:
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
action_4:
|
||||
description: |
|
||||
The action to trigger. Each action takes a
|
||||
different set of parameters. Supported actions include: -
|
||||
``check`` Check the health status of a node. For example: -
|
||||
``recover`` Recover a node from its current unhealthy status. For
|
||||
example:
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
cluster:
|
||||
description: |
|
||||
A map of cluster details.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
cluster_id_1:
|
||||
description: |
|
||||
The ID or shortID or name of the cluster the node
|
||||
lives in. If not specified, the node created will be an orphaned
|
||||
node.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
clusters:
|
||||
description: |
|
||||
List of cluster records. Each record contains
|
||||
fields such as ``created_at``, ``id``, ``name``, ``profile_id``,
|
||||
``size``, ``nodes``, ``status``, ``status_reason``, and so on.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
cooldown:
|
||||
description: |
|
||||
The cooldown value, in seconds.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
desired_capacity:
|
||||
description: |
|
||||
The capacity, or initial size, of the cluster.
|
||||
Default is 0.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
level:
|
||||
description: |
|
||||
An integer value that represents the default
|
||||
enforcement level.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
max_size:
|
||||
description: |
|
||||
The maximum size of the cluster. Default is
|
||||
``-1``, which indicates that no upper limit exists for the cluster
|
||||
size.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
metadata:
|
||||
description: |
|
||||
A set of key and value pairs to associate with
|
||||
the cluster.
|
||||
in: body
|
||||
required: false
|
||||
type: object
|
||||
metadata_1:
|
||||
description: |
|
||||
A set of key and value pairs to associate with
|
||||
the node.
|
||||
in: body
|
||||
required: false
|
||||
type: object
|
||||
metadata_2:
|
||||
description: |
|
||||
A list of key and value pairs to associate with
|
||||
the profile.
|
||||
in: body
|
||||
required: false
|
||||
type: array
|
||||
metadata_4:
|
||||
description: |
|
||||
A list of key and value pairs to associate with
|
||||
the target profile.
|
||||
in: body
|
||||
required: false
|
||||
type: object
|
||||
min_size:
|
||||
description: |
|
||||
The minimum size of the cluster. Default is 0.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
name:
|
||||
description: |
|
||||
The name of the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_1:
|
||||
description: |
|
||||
The name of the node to be created.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_11:
|
||||
description: |
|
||||
New name for the target profile.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
name_2:
|
||||
description: |
|
||||
The name for the policy.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_3:
|
||||
description: |
|
||||
The name for the profile.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_8:
|
||||
description: |
|
||||
The name of the policy type.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
node:
|
||||
description: |
|
||||
Detailed data for the node, such as ``id``,
|
||||
``name``, ``status``, and so on.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
node_1:
|
||||
description: |
|
||||
A map with detailed data for the node. **Node
|
||||
update request body** - ``name`` New name for the node
|
||||
(optional). - ``profile_id`` Name, ID, or short ID of the new
|
||||
profile to use by the node. The new profile has to have the same
|
||||
profile type as that of the node (optional). - ``role`` The new
|
||||
role this node plays in a cluster (optional). - ``metadata`` A
|
||||
list of key-value pairs to attach to the updated node
|
||||
(optional).
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
nodes:
|
||||
description: |
|
||||
List of node records. Each record contains fields
|
||||
such as ``id``, ``cluster_id``, ``name``, ``physical_id``,
|
||||
``profile_id``, ``created_at``, ``index``, ``status``,
|
||||
``status_reason``, ``metadata``, ``updated_at``, and so on.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
parent:
|
||||
description: |
|
||||
The UUID of the parent cluster if the cluster is
|
||||
a nested cluster.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
policies:
|
||||
description: |
|
||||
List of policy records. Each record contains
|
||||
fields such as ``id``, ``name``, ``type``, ``spec``, and so on.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
policy:
|
||||
description: |
|
||||
A map with keys and values that specify the
|
||||
details for the policy to be created:
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
policy_1:
|
||||
description: |
|
||||
A map with a set of key and value pairs that
|
||||
specify the details of the policy: **Policy body** - ``name``
|
||||
Name for the policy, if specified.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
profile:
|
||||
description: |
|
||||
A dictionary with profile details. **Profile
|
||||
create response** - ``id`` An unique ID for the profile. -
|
||||
``name`` Name for the profile. - ``type`` Name of policy type
|
||||
referenced by the profile. - ``spec`` Detailed specification
|
||||
based on the profile type. - ``metadata`` A list of key and value
|
||||
pairs that are attached to the profile. - ``created_at`` The
|
||||
UTC date and time stamp when the profile was created. -
|
||||
``updated_at`` The UTC date and time stamp when the profile was
|
||||
updated. - ``domain`` The ID of the domain to which the profile
|
||||
belongs. - ``project`` The ID of the project to which the profile
|
||||
belongs. - ``user`` The ID of the user who created the profile.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
profile_id:
|
||||
description: |
|
||||
The ID or name of the profile for the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
profile_id_1:
|
||||
description: |
|
||||
The ID or shortID or name of the profile for the
|
||||
node.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
profiles:
|
||||
description: |
|
||||
Profile records. Each record contains the ``id``,
|
||||
``name``, ``type``, ``spec``, ``metadata``, and other fields.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
receiver:
|
||||
description: |
|
||||
A map with detailed data for the receiver.
|
||||
**Receiver Create Request Body** - ``name`` Name for the receiver
|
||||
(optional). - ``cluster_id`` Name, ID, or short ID of the object
|
||||
targeted by the receiver (required). - ``type`` The type of the
|
||||
receiver where the only valid value is ``webhook`` currently
|
||||
(required). - ``action`` The action to initiate when the receiver
|
||||
is triggered. A valid value should be the name of an action that
|
||||
can be applied on a cluster. - ``actor`` A map of key and value
|
||||
pairs to use for authentication. If omitted, the requester is
|
||||
assumed to be the actor (optional). - ``params`` A map of key and
|
||||
value pairs to use for action creation. Some actions might
|
||||
require certain input parameters (optional).
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
role:
|
||||
description: |
|
||||
A string indicating the role this node plays in a
|
||||
cluster.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
spec:
|
||||
description: |
|
||||
A detailed specification based on the policy
|
||||
type.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
spec_1:
|
||||
description: |
|
||||
Detailed specification based on the chosen
|
||||
profile type.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
timeout:
|
||||
description: |
|
||||
The timeout value, in minutes, for cluster
|
||||
creation. Default is 60.
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
|
|
@ -1,980 +0,0 @@
|
|||
# variables in header
|
||||
Content-Length:
|
||||
description: |
|
||||
The length of the data, in bytes.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
|
||||
# variables in path
|
||||
cluster_id:
|
||||
description: |
|
||||
The ID of the cluster
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
cluster_template_id_1:
|
||||
description: |
|
||||
The unique identifier of the cluster template.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
data_source_id:
|
||||
description: |
|
||||
The UUID of the data source.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
hints:
|
||||
description: |
|
||||
Includes configuration hints in the response.
|
||||
in: path
|
||||
required: false
|
||||
type: boolean
|
||||
image_id:
|
||||
description: |
|
||||
The UUID of the image.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
job_binary_id:
|
||||
description: |
|
||||
The UUID of the job binary.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
job_binary_internals_id:
|
||||
description: |
|
||||
The UUID of the job binary internal.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
job_execution_id:
|
||||
description: |
|
||||
The UUID of the job execution.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
job_id:
|
||||
description: |
|
||||
The UUID of the job.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
name_3:
|
||||
description: |
|
||||
The name of the job binary internal.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
node_group_template_id:
|
||||
description: |
|
||||
The UUID of the node group template.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
plugin:
|
||||
description: |
|
||||
Filters the response by a plugin name.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
plugin_name_1:
|
||||
description: |
|
||||
Name of the plugin.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
type_2:
|
||||
description: |
|
||||
Filters the response by a job type.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
version:
|
||||
description: |
|
||||
Filters the response by a plugin version.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
version_1:
|
||||
description: |
|
||||
Version of the plugin.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
{}
|
||||
|
||||
# variables in body
|
||||
args:
|
||||
description: |
|
||||
The list of arguments.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
auto_security_group:
|
||||
description: |
|
||||
If set to ``True``, the cluster group is
|
||||
automatically secured.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
availability_zone:
|
||||
description: |
|
||||
The availability of the node in the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
binaries:
|
||||
description: |
|
||||
The list of job binary internal objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
cluster_configs:
|
||||
description: |
|
||||
A set of key and value pairs that contain the
|
||||
cluster configuration.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
cluster_id_1:
|
||||
description: |
|
||||
The UUID of the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
cluster_template_id:
|
||||
description: |
|
||||
The UUID of the cluster template.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
clusters:
|
||||
description: |
|
||||
The list of clusters.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
configs:
|
||||
description: |
|
||||
The mappings of the job tasks.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
count:
|
||||
description: |
|
||||
The number of nodes in the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
created:
|
||||
description: |
|
||||
The date and time when the image was created.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
created_at:
|
||||
description: |
|
||||
The date and time when the cluster was created.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, returns the time zone as an
|
||||
offset from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
created_at_1:
|
||||
description: |
|
||||
The date and time when the object was created.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, returns the time zone as an
|
||||
offset from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
created_at_2:
|
||||
description: |
|
||||
The date and time when the node was created in the cluster.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, returns the time zone as an
|
||||
offset from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
created_at_3:
|
||||
description: |
|
||||
The date and time when the job execution object was created.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, returns the time zone as an
|
||||
offset from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
data_source_urls:
|
||||
description: |
|
||||
The data source URLs.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
datasize:
|
||||
description: |
|
||||
The size of the data stored in the internal
|
||||
database.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
default_image_id:
|
||||
description: |
|
||||
The default ID of the image.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description:
|
||||
description: |
|
||||
The description of the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description_1:
|
||||
description: |
|
||||
The description of the data source object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description_2:
|
||||
description: |
|
||||
The description of the job object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description_3:
|
||||
description: |
|
||||
The description of the node in the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description_4:
|
||||
description: |
|
||||
The description of the job binary object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description_5:
|
||||
description: |
|
||||
The description of the image.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description_6:
|
||||
description: |
|
||||
The full description of the plugin.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description_7:
|
||||
description: |
|
||||
Description of the image.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
end_time:
|
||||
description: |
|
||||
The end date and time of the job execution.
|
||||
|
||||
The date and time when the job completed execution.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, returns the time zone as an
|
||||
offset from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
flavor_id:
|
||||
description: |
|
||||
The ID of the flavor.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
floating_ip_pool:
|
||||
description: |
|
||||
The UUID of the pool in the template.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
hadoop_version:
|
||||
description: |
|
||||
The version of the Hadoop used in the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
hadoop_version_1:
|
||||
description: |
|
||||
The version of the Hadoop.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id:
|
||||
description: |
|
||||
The UUID of the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_1:
|
||||
description: |
|
||||
The ID of the object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_2:
|
||||
description: |
|
||||
The UUID of the node in the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_3:
|
||||
description: |
|
||||
The UUID of the job execution object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_4:
|
||||
description: |
|
||||
The UUID of the image.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
image:
|
||||
description: |
|
||||
A set of key and value pairs that contain image
|
||||
properties.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
image_id_1:
|
||||
description: |
|
||||
The UUID of the image.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
images:
|
||||
description: |
|
||||
The list of images and their properties.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
info:
|
||||
description: |
|
||||
A set of key and value pairs that contain cluster
|
||||
information.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
info_1:
|
||||
description: |
|
||||
The report of the executed job objects.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
input_id:
|
||||
description: |
|
||||
The UUID of the input.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
interface:
|
||||
description: |
|
||||
The interfaces of the job object.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
is_default:
|
||||
description: |
|
||||
If set to ``true``, the cluster is the default
|
||||
cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
is_default_1:
|
||||
description: |
|
||||
If set to ``True`` the node is the default node
|
||||
in the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
is_protected:
|
||||
description: |
|
||||
If set to ``true``, the cluster is protected.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
is_protected_1:
|
||||
description: |
|
||||
If set to ``true``, the object is protected.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
is_protected_2:
|
||||
description: |
|
||||
If set to ``true``, the node is protected.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
is_protected_3:
|
||||
description: |
|
||||
If set to ``true``, the job execution object is
|
||||
protected.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
is_proxy_gateway:
|
||||
description: |
|
||||
If set to ``true``, the node is the proxy
|
||||
gateway.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
is_public:
|
||||
description: |
|
||||
If set to ``true``, the cluster is public.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
is_public_1:
|
||||
description: |
|
||||
If set to ``true``, the object is public.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
is_public_2:
|
||||
description: |
|
||||
If set to ``True``, the node is public in the
|
||||
cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
is_public_3:
|
||||
description: |
|
||||
If set to ``true``, the job execution object is
|
||||
public.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
is_transient:
|
||||
description: |
|
||||
If set to ``true``, the cluster is transient.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
job_execution:
|
||||
description: |
|
||||
A set of key and value pairs that contain the job
|
||||
object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
job_executions:
|
||||
description: |
|
||||
The list of job execution objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
job_id_1:
|
||||
description: |
|
||||
The UUID of the job object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
job_types:
|
||||
description: |
|
||||
The list of plugins and their job types.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
jobs:
|
||||
description: |
|
||||
The list of the jobs.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
libs:
|
||||
description: |
|
||||
The list of the job object properties.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
mains:
|
||||
description: |
|
||||
The list of the job object and their properties.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
management_public_key:
|
||||
description: |
|
||||
The SSH key for the management network.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
metadata:
|
||||
description: |
|
||||
A set of key and value pairs that contain image
|
||||
metadata.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
minDisk:
|
||||
description: |
|
||||
The minimum disk space, in GB.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
minRam:
|
||||
description: |
|
||||
The minimum amount of random access memory (RAM)
|
||||
for the image, in GB.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
name:
|
||||
description: |
|
||||
The name of the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_1:
|
||||
description: |
|
||||
The name of the object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_2:
|
||||
description: |
|
||||
The name of the node.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_4:
|
||||
description: |
|
||||
The name of the operating system image.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_5:
|
||||
description: |
|
||||
The name of the plugin.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
neutron_management_network:
|
||||
description: |
|
||||
The UUID of the neutron management network.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
node_configs:
|
||||
description: |
|
||||
A set of key and value pairs that contain the
|
||||
node configuration in the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
node_groups:
|
||||
description: |
|
||||
The detail properties of the node in key-value
|
||||
pairs.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
node_processes:
|
||||
description: |
|
||||
The list of the processes performed by the node.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
oozie_job_id:
|
||||
description: |
|
||||
The UUID of the ``oozie_job``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
output_id:
|
||||
description: |
|
||||
The UUID of the output of job execution object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
params:
|
||||
description: |
|
||||
The mappings of values to the parameters.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
plugin_name:
|
||||
description: |
|
||||
The name of the plugin.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
plugins:
|
||||
description: |
|
||||
The list of plugins.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
progress:
|
||||
description: |
|
||||
A progress indicator, as a percentage value, for
|
||||
the amount of image content that has been processed.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
provision_progress:
|
||||
description: |
|
||||
A list of the cluster progresses.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
return_code:
|
||||
description: |
|
||||
The code returned after job has executed.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
security_groups:
|
||||
description: |
|
||||
The security groups of the node.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
shares:
|
||||
description: |
|
||||
The shares of the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
shares_1:
|
||||
description: |
|
||||
The sharing of resources in the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
start_time:
|
||||
description: |
|
||||
The date and time when the job started.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, returns the time zone as an
|
||||
offset from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
status:
|
||||
description: |
|
||||
The status of the cluster.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
status_1:
|
||||
description: |
|
||||
The current status of the image.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
status_description:
|
||||
description: |
|
||||
The description of the cluster status.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
tags:
|
||||
description: |
|
||||
List of tags to add.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
tags_1:
|
||||
description: |
|
||||
Lists images only with specific tag. Can be used
|
||||
multiple times.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
tags_2:
|
||||
description: |
|
||||
One or more image tags.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
tags_3:
|
||||
description: |
|
||||
List of tags to remove.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
tenant_id:
|
||||
description: |
|
||||
The UUID of the tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
title:
|
||||
description: |
|
||||
The title of the plugin.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
trust_id:
|
||||
description: |
|
||||
The id of the trust.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
type:
|
||||
description: |
|
||||
The type of the data source object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
type_1:
|
||||
description: |
|
||||
The type of the job object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
updated:
|
||||
description: |
|
||||
The date and time when the image was updated.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
updated_at:
|
||||
description: |
|
||||
The date and time when the cluster was updated.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, returns the time zone as an
|
||||
offset from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
updated_at_1:
|
||||
description: |
|
||||
The date and time when the object was updated.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, returns the time zone as an
|
||||
offset from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
updated_at_2:
|
||||
description: |
|
||||
The date and time when the node was updated.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, returns the time zone as an
|
||||
offset from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
updated_at_3:
|
||||
description: |
|
||||
The date and time when the job execution object was updated.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, returns the time zone as an
|
||||
offset from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
url:
|
||||
description: |
|
||||
The url of the data source object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
url_1:
|
||||
description: |
|
||||
The url of the job binary object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
use_autoconfig:
|
||||
description: |
|
||||
If set to ``true``, the cluster is auto
|
||||
configured.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
use_autoconfig_1:
|
||||
description: |
|
||||
If set to ``true``, the node is auto configured.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
username:
|
||||
description: |
|
||||
The name of the user for the image.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
username_1:
|
||||
description: |
|
||||
The user name to log in to an instance operating
|
||||
system for remote operations execution.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
versions:
|
||||
description: |
|
||||
The list of plugin versions.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
volume_local_to_instance:
|
||||
description: |
|
||||
If set to ``true``, the volume is local to the
|
||||
instance.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
volume_mount_prefix:
|
||||
description: |
|
||||
The mount point of the node.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
volume_type:
|
||||
description: |
|
||||
The type of volume in a node.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
volumes_availability_zone:
|
||||
description: |
|
||||
The availability zone of the volumes.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
volumes_per_node:
|
||||
description: |
|
||||
The number of volumes for the node.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
volumes_size:
|
||||
description: |
|
||||
The size of the volumes in a node.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
|
|
@ -1,93 +0,0 @@
|
|||
.. -*- rst -*-
|
||||
|
||||
============
|
||||
API versions
|
||||
============
|
||||
|
||||
Lists information for all Database Service API versions and shows
|
||||
Database Service v1.0 details.
|
||||
|
||||
|
||||
List versions
|
||||
=============
|
||||
|
||||
.. rest_method:: GET /
|
||||
|
||||
Lists information about all Database Service API versions.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: listVersions-dbaas-v1.yaml
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-versions-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Show version details
|
||||
====================
|
||||
|
||||
.. rest_method:: GET /v1.0
|
||||
|
||||
Shows details for the Database Service API v1.0.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: showVersionInfo-dbaas-v1.yaml
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-version-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,327 +0,0 @@
|
|||
.. -*- rst -*-
|
||||
|
||||
=====================================
|
||||
Configuration groups (configurations)
|
||||
=====================================
|
||||
|
||||
Creates and lists all configuration groups.
|
||||
|
||||
|
||||
Create configuration group
|
||||
==========================
|
||||
|
||||
.. rest_method:: POST /v1.0/{accountId}/configurations
|
||||
|
||||
Creates a configuration group.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: createConfigGroup.yaml
|
||||
|
||||
- datastore: datastore
|
||||
- values: values
|
||||
- name: name
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-create-config-grp-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-create-config-grp-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
List configuration groups
|
||||
=========================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/configurations
|
||||
|
||||
Lists all configuration groups.
|
||||
|
||||
The list includes the associated data store and data store version
|
||||
for each configuration group.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: listConfigGroups.yaml
|
||||
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-list-cfg-groups-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
List configuration group instances
|
||||
==================================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/configurations/{configId}/instances
|
||||
|
||||
Lists the instances associated with the specified configuration group.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: listConfigInstances.yaml
|
||||
|
||||
- configId: configId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-config-group-instances-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Delete configuration group
|
||||
==========================
|
||||
|
||||
.. rest_method:: DELETE /v1.0/{accountId}/configurations/{configId}
|
||||
|
||||
Deletes a configuration group.
|
||||
|
||||
Error response codes:202,413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: deleteConfigGroup.yaml
|
||||
|
||||
- configId: configId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Patch configuration group
|
||||
=========================
|
||||
|
||||
.. rest_method:: PATCH /v1.0/{accountId}/configurations/{configId}
|
||||
|
||||
Sets new values for a configuration group.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: patchConfigGroup.yaml
|
||||
|
||||
- values: values
|
||||
- configId: configId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-patch-config-grp-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-patch-config-grp-response-json-http.txt
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Show configuration group details
|
||||
================================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/configurations/{configId}
|
||||
|
||||
Lists details about a configuration group, including its values.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: listConfigDetails.yaml
|
||||
|
||||
- configId: configId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-config-group-details-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Update configuration group
|
||||
==========================
|
||||
|
||||
.. rest_method:: PUT /v1.0/{accountId}/configurations/{configId}
|
||||
|
||||
Sets new values for a configuration group. Also lets you change the name and description of the configuration group.
|
||||
|
||||
Error response codes:202,413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: updateConfigGroup.yaml
|
||||
|
||||
- values: values
|
||||
- description: description
|
||||
- name: name
|
||||
- configId: configId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-update-config-grp-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,279 +0,0 @@
|
|||
.. -*- rst -*-
|
||||
|
||||
==================================
|
||||
Database instance actions (action)
|
||||
==================================
|
||||
|
||||
Resizes instances and volumes and restarts instances.
|
||||
|
||||
|
||||
Restart instance
|
||||
================
|
||||
|
||||
.. rest_method:: POST /v1.0/{accountId}/instances/{instanceId}/action
|
||||
|
||||
Restarts the database service for an instance.
|
||||
|
||||
The restart operation restarts only the MySQL instance. Restarting
|
||||
MySQL erases any dynamic configuration settings that you make in
|
||||
MySQL.
|
||||
|
||||
The MySQL service is unavailable until the instance restarts.
|
||||
|
||||
If the operation succeeds, it returns the ``Accepted (202)``
|
||||
response code.
|
||||
|
||||
Error response codes:202,413,415,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: restartInstance.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-instance-restart-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Resize instance
|
||||
===============
|
||||
|
||||
.. rest_method:: POST /v1.0/{accountId}/instances/{instanceId}/action
|
||||
|
||||
Resizes the memory for an instance.
|
||||
|
||||
If you provide a valid ``flavorRef``, this operation changes the
|
||||
memory size of the instance, and restarts MySQL.
|
||||
|
||||
Error response codes:202,413,415,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: resizeInstance.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-instance-resize-instance-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Resize instance volume
|
||||
======================
|
||||
|
||||
.. rest_method:: POST /v1.0/{accountId}/instances/{instanceId}/action
|
||||
|
||||
Resizes the volume that is attached to an instance.
|
||||
|
||||
You can use this operation to increase but not decrease the volume
|
||||
size. A valid volume size is an integer value in gigabytes (GB).
|
||||
|
||||
You cannot increase the volume to a size that is larger than the
|
||||
API volume size limit.
|
||||
|
||||
If this operation succeeds, it returns a 202 Accepted response.
|
||||
|
||||
Error response codes:202,413,415,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: resizeVolume.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-instance-resize-volume-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Promote instance to replica source
|
||||
==================================
|
||||
|
||||
.. rest_method:: POST /v1.0/{accountId}/instances/{instanceId}/action
|
||||
|
||||
Promotes a replica.
|
||||
|
||||
If you have set up replication, and the base instance is still
|
||||
reachable, you can use this operation to promote a replica to be
|
||||
the new base instance.
|
||||
|
||||
This can be useful if you want to make a configuration change to
|
||||
the base instance that your replicas are replicating from. For
|
||||
example, you might want to increase the disk or CPU capacity. If
|
||||
you made the change on the base instance directly, you would need
|
||||
to take the base instance down for the duration of the operation.
|
||||
Instead, you can create a replica, make the configuration change on
|
||||
the replica, and then promote the replica to become the new base
|
||||
instance.
|
||||
|
||||
For ``instanceId``, pass in the instance ID of the replica you want
|
||||
to promote.
|
||||
|
||||
Error response codes:202,413,415,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: promoteToReplicaSource.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-instance-promote-replica-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Delete replication base instance
|
||||
================================
|
||||
|
||||
.. rest_method:: POST /v1.0/{accountId}/instances/{instanceId}/action
|
||||
|
||||
Deletes the base instance in a replication set.
|
||||
|
||||
If the base instance becomes unreachable, you can use this
|
||||
operation to delete the base instance.
|
||||
|
||||
This operation:
|
||||
|
||||
- Finds the replica that has processed the greatest number of
|
||||
transactions and picks that replica to use as the new base
|
||||
instance.
|
||||
|
||||
- Transfers the public IP of the old base instance to the new base
|
||||
instance (which is the newly-promoted replica).
|
||||
|
||||
- Deletes the old base instance.
|
||||
|
||||
- Takes all the instances in the replication set and makes them
|
||||
start replicating from the new base instance.
|
||||
|
||||
For ``instanceId``, pass in the instance ID of the unreachable base
|
||||
instance.
|
||||
|
||||
Error response codes:202,413,415,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: ejectReplicaSource.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-instance-eject-replica-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,441 +0,0 @@
|
|||
.. -*- rst -*-
|
||||
|
||||
==============================
|
||||
Database instances (instances)
|
||||
==============================
|
||||
|
||||
Creates, lists, shows details for, attaches a configuration group
|
||||
to, detaches a configuration group from, deletes, lists
|
||||
configuration defaults, creates root, and determines whether root
|
||||
is enabled for instances.
|
||||
|
||||
|
||||
Delete database instance
|
||||
========================
|
||||
|
||||
.. rest_method:: DELETE /v1.0/{accountId}/instances/{instanceId}
|
||||
|
||||
Deletes a database instance, including any associated data.
|
||||
|
||||
This operation does not delete any read slaves.
|
||||
|
||||
You cannot complete this operation when the instance state is
|
||||
either ``REBUILDING`` or ``BUILDING``.
|
||||
|
||||
Error response codes:202,413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: deleteInstance.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Show database instance details
|
||||
==============================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/instances/{instanceId}
|
||||
|
||||
Shows database instance details.
|
||||
|
||||
Lists the status and details of the database instance.
|
||||
|
||||
Lists the volume size in gigabytes (GB) and the approximate GB
|
||||
used.
|
||||
|
||||
After instance creation, the ``used`` value is greater than 0,
|
||||
which is expected and due to the automatic creation of non-empty
|
||||
transaction logs for MySQL optimization. The response does not
|
||||
include the ``used`` attribute when the instance status is
|
||||
``BUILD``, ``REBOOT``, ``RESIZE``, or ``ERROR``.
|
||||
|
||||
The list operations return a DNS-resolvable host name for the
|
||||
database instance rather than an IP address. Because the host name
|
||||
always resolves to the correct IP address for the database
|
||||
instance, you do not need to maintain the mapping. Although the IP
|
||||
address might change when you resize, migrate, or perform other
|
||||
operations, the host name always resolves to the correct database
|
||||
instance.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: showInstanceById.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-instance-status-detail-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Attach configuration group
|
||||
==========================
|
||||
|
||||
.. rest_method:: PUT /v1.0/{accountId}/instances/{instanceId}
|
||||
|
||||
Attaches a configuration group to an instance.
|
||||
|
||||
Error response codes:202,413,415,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: attachConfigGroup.yaml
|
||||
|
||||
- configuration: configuration
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-attach-config-grp-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Detach configuration group
|
||||
==========================
|
||||
|
||||
.. rest_method:: PUT /v1.0/{accountId}/instances/{instanceId}
|
||||
|
||||
Detaches a configuration group from an instance.
|
||||
|
||||
When you pass in only an instance ID and omit the configuration ID,
|
||||
this operation detaches any configuration group that was attached
|
||||
to the instance.
|
||||
|
||||
Error response codes:202,413,415,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: detachConfigGroup.yaml
|
||||
|
||||
- configuration: configuration
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-detach-config-grp-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Detach replica
|
||||
==============
|
||||
|
||||
.. rest_method:: PATCH /v1.0/{accountId}/instances/{instanceId}
|
||||
|
||||
Detaches a replica from its replication source.
|
||||
|
||||
If you created an instance that is a replica of a source instance,
|
||||
you can detach the replica from the source. This can be useful if
|
||||
the source becomes unavailable. In this case, you can detach the
|
||||
replica from the source, making the replica a standalone database
|
||||
instance. You can then take the new standalone instance and create
|
||||
a new replica of that instance.
|
||||
|
||||
Error response codes:202,413,415,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: detachReplica.yaml
|
||||
|
||||
- replica_of: replica_of
|
||||
- slave_of: slave_of
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-detach-replica-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Create database instance
|
||||
========================
|
||||
|
||||
.. rest_method:: POST /v1.0/{accountId}/instances
|
||||
|
||||
Creates a database instance.
|
||||
|
||||
Asynchronously provisions a database instance. You must specify a
|
||||
flavor and a volume size. The service provisions the instance with
|
||||
a volume of the requested size, which serves as storage for the
|
||||
database instance.
|
||||
|
||||
**Notes**
|
||||
|
||||
- You can create only one database instance per POST request.
|
||||
|
||||
- You can create a database instance with one or more databases. You
|
||||
associate users with each database.
|
||||
|
||||
- The default binding for the MySQL instance is port 3306.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: createInstance.yaml
|
||||
|
||||
- users: users
|
||||
- password: password
|
||||
- datastore_version: datastore_version
|
||||
- name: name
|
||||
- flavorRef: flavorRef
|
||||
- characterSet: characterSet
|
||||
- replica_count: replica_count
|
||||
- instance: instance
|
||||
- collate: collate
|
||||
- databases: databases
|
||||
- datastore: datastore
|
||||
- configuration: configuration
|
||||
- type: type
|
||||
- replica_of: replica_of
|
||||
- size: size
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-create-instance-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
Response Parameters
|
||||
-------------------
|
||||
|
||||
.. rest_parameters:: createInstance.yaml
|
||||
|
||||
- updated: updated
|
||||
- name: name
|
||||
- created: created
|
||||
- characterSet: characterSet
|
||||
- instance: instance
|
||||
- collate: collate
|
||||
- databases: databases
|
||||
- flavor: flavor
|
||||
- users: users
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-create-instance-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
List database instances
|
||||
=======================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/instances
|
||||
|
||||
Lists information, including status, for all database instances.
|
||||
|
||||
Lists status and information for all database instances.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: listInstances.yaml
|
||||
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-instances-index-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
List configuration defaults
|
||||
===========================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/instances/{instanceId}/configuration
|
||||
|
||||
Lists the configuration defaults for an instance.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: listConfigDefaults.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-list-cfg-defaults-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,143 +0,0 @@
|
|||
.. -*- rst -*-
|
||||
|
||||
=====================
|
||||
Databases (databases)
|
||||
=====================
|
||||
|
||||
Creates, lists all, and deletes databases.
|
||||
|
||||
|
||||
Delete database
|
||||
===============
|
||||
|
||||
.. rest_method:: DELETE /v1.0/{accountId}/instances/{instanceId}/databases/{databaseName}
|
||||
|
||||
Deletes a database.
|
||||
|
||||
This operation also deletes all data that is associated with the
|
||||
database.
|
||||
|
||||
Error response codes:202,413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: deleteDatabase.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- databaseName: databaseName
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Create database
|
||||
===============
|
||||
|
||||
.. rest_method:: POST /v1.0/{accountId}/instances/{instanceId}/databases
|
||||
|
||||
Creates a database within an instance.
|
||||
|
||||
The ``name`` of the database is a required attribute.
|
||||
|
||||
Error response codes:202,413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: createDatabase.yaml
|
||||
|
||||
- characterSet: characterSet
|
||||
- collate: collate
|
||||
- name: name
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-create-databases-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
List instance databases
|
||||
=======================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/instances/{instanceId}/databases
|
||||
|
||||
Lists databases for an instance.
|
||||
|
||||
This operation returns only the user-defined databases and not the
|
||||
system databases. Only the database administrator can view the
|
||||
``mysql``, ``information_schema``, and ``lost+found`` system
|
||||
databases.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: listDatabases.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-list-databases-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,149 +0,0 @@
|
|||
.. -*- rst -*-
|
||||
|
||||
========================
|
||||
Data stores (datastores)
|
||||
========================
|
||||
|
||||
Lists data store versions, lists parameters for data stores, and
|
||||
shows parameter details for a data store version.
|
||||
|
||||
|
||||
Show configuration parameter details
|
||||
====================================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/datastores/versions/{datastore_version_id}/parameters/{parameter_name}
|
||||
|
||||
Displays details for a configuration parameter associated with a data store version.
|
||||
|
||||
Details include the type, minimum and maximum values, and whether
|
||||
you must restart the instance after you change the parameter value.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: showParameterDetails.yaml
|
||||
|
||||
- parameter_name: parameter_name
|
||||
- datastore_version_id: datastore_version_id
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-show-parameter-details.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
List datastore versions
|
||||
=======================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/datastores/{datastore_name}/versions
|
||||
|
||||
Lists the available versions of a data store.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: listDatastoreVersions.yaml
|
||||
|
||||
- datastore_name: datastore_name
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-list-datastore-versions.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
List configuration parameters
|
||||
=============================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/datastores/versions/{datastore_version_id}/parameters
|
||||
|
||||
Lists the available configuration parameters for a data store version.
|
||||
|
||||
Parameter information includes the type, minimum and maximum
|
||||
values, and whether you must restart the instance after you change
|
||||
a parameter value.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: listParameters.yaml
|
||||
|
||||
- datastore_version_id: datastore_version_id
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-list-parameters-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,108 +0,0 @@
|
|||
.. -*- rst -*-
|
||||
|
||||
=================
|
||||
Flavors (flavors)
|
||||
=================
|
||||
|
||||
Lists all flavors and shows details for a flavor, by ID.
|
||||
|
||||
|
||||
Show flavor details
|
||||
===================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/flavors/{flavorId}
|
||||
|
||||
Shows details for a flavor, including the amount of RAM.
|
||||
|
||||
This resource is identical to the flavors found in the OpenStack
|
||||
Compute API, but without the disk property.
|
||||
|
||||
The ``flavorId`` parameter must be an integer value. If you use a
|
||||
floating point value for this parameter, this call truncates the
|
||||
decimal portion and uses the integer portion as the ``flavorId``
|
||||
value.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: showFlavorById.yaml
|
||||
|
||||
- flavorId: flavorId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-flavors-by-id-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
List flavors
|
||||
============
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/flavors
|
||||
|
||||
Lists information for all available flavors.
|
||||
|
||||
This operation lists information for all available flavors.
|
||||
|
||||
This resource is identical to the flavors found in the OpenStack
|
||||
Nova API, but without the disk property.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: listFlavors.yaml
|
||||
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-flavors-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,293 +0,0 @@
|
|||
# variables in header
|
||||
{}
|
||||
|
||||
# variables in path
|
||||
accountId:
|
||||
description: |
|
||||
The account ID of the owner of the instance.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
configId:
|
||||
description: |
|
||||
The ID of the configuration group.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
databaseName:
|
||||
description: |
|
||||
The name for the database.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
datastore_name:
|
||||
description: |
|
||||
The name of the data store.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
datastore_version_id:
|
||||
description: |
|
||||
The UUID of the data store version.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
flavorId:
|
||||
description: |
|
||||
The ID for the flavor.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
instanceId:
|
||||
description: |
|
||||
The ID for the database instance.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
parameter_name:
|
||||
description: |
|
||||
The name of the parameter for which to show
|
||||
details.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
{}
|
||||
|
||||
# variables in body
|
||||
characterSet:
|
||||
description: |
|
||||
A set of symbols and encodings. Default is
|
||||
``utf8``. For information about supported character sets and
|
||||
collations, see `Character Sets and Collations in MySQL
|
||||
<http://dev.mysql.com/doc/refman/5.1/en/charset-mysql.html>`_.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
collate:
|
||||
description: |
|
||||
A set of rules for comparing characters in a
|
||||
character set. Default is ``utf8_general_ci``. For information
|
||||
about supported character sets and collations, see `Character Sets
|
||||
and Collations in MySQL <http://dev.mysql.com/doc/refman/5.1/en
|
||||
/charset-mysql.html>`_.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
configuration:
|
||||
description: |
|
||||
ID of the configuration group that you want to
|
||||
attach to the instance.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
configuration_1:
|
||||
description: |
|
||||
ID of the configuration group to attach to the
|
||||
instance.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
configuration_2:
|
||||
description: |
|
||||
To detach a configuration group, set the
|
||||
configuration parameter to null.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
created:
|
||||
description: |
|
||||
The date and time when the resource was created.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC. In the previous example, the offset value is ``-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
databases:
|
||||
description: |
|
||||
A ``databases`` object.
|
||||
in: body
|
||||
required: false
|
||||
type: array
|
||||
datastore:
|
||||
description: |
|
||||
Data store assigned to the configuration group.
|
||||
Required if you did not configure the default data store.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
datastore_1:
|
||||
description: |
|
||||
Name of the datastore to use when creating the
|
||||
instance.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
datastore_version:
|
||||
description: |
|
||||
Name of the datastore version to use when
|
||||
creating the instance.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
description:
|
||||
description: |
|
||||
New description of the configuration group.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
flavor:
|
||||
description: |
|
||||
A ``flavor`` object, which includes the flavor ID
|
||||
(integer) and flavor relative links.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
flavorRef:
|
||||
description: |
|
||||
Reference (href), which is the actual URI to a
|
||||
flavor as it appears in the list flavors response. Rather than
|
||||
the flavor URI, you can also pass the flavor ID (integer) as the
|
||||
``flavorRef`` value. For example, ``1``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
instance:
|
||||
description: |
|
||||
An ``instance`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
name:
|
||||
description: |
|
||||
Name of the configuration group you are creating.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_1:
|
||||
description: |
|
||||
A database name. You cannot use the
|
||||
``lost+found``, ``information_schema``, or ``mysql`` database name
|
||||
to create a database because these names are reserved for system
|
||||
databases. Valid characters in a database name are: - Upper and
|
||||
lower case letters. - Numbers. - ``@``, ``?``, ``#``, and spaces
|
||||
except at the beginning or end of the database name. - ``_`` is
|
||||
allowed anywhere in the database name. You cannot use these
|
||||
characters in a database name: single quotes, double quotes, back quotes, semicolons, commas, backslashes, and forward slashes. The maximum length of a database
|
||||
name is 64 characters.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
name_2:
|
||||
description: |
|
||||
The user name for the database on instance
|
||||
creation.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
name_3:
|
||||
description: |
|
||||
New name of the configuration group.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
password:
|
||||
description: |
|
||||
The password for those users on instance
|
||||
creation.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
replica_count:
|
||||
description: |
|
||||
Number of replicas to create (defaults to 1).
|
||||
in: body
|
||||
required: false
|
||||
type: integer
|
||||
replica_of:
|
||||
description: |
|
||||
ID or name of an existing instance to replicate
|
||||
from.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
replica_of_1:
|
||||
description: |
|
||||
To detach a replica, set ``replica_of`` to null.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
size:
|
||||
description: |
|
||||
The volume size, in gigabytes (GB). A valid value
|
||||
is from 1 to 50.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
slave_of:
|
||||
description: |
|
||||
To detach a replica, set ``slave_of`` to null.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
type:
|
||||
description: |
|
||||
The volume type to use. You can list the
|
||||
available volume types on your system by using the ``cinder type-
|
||||
list`` command. If you want to specify a volume type, you must
|
||||
also specify a volume size.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
updated:
|
||||
description: |
|
||||
The date and time when the resource was updated.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The UTC time zone is assumed.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
users:
|
||||
description: |
|
||||
A ``users`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
values:
|
||||
description: |
|
||||
Dictionary that lists configuration parameter
|
||||
names and associated values.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
values_1:
|
||||
description: |
|
||||
Dictionary that lists configuration parameter
|
||||
names and associated values.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
|
|
@ -1,375 +0,0 @@
|
|||
.. -*- rst -*-
|
||||
|
||||
=============
|
||||
Users (users)
|
||||
=============
|
||||
|
||||
Creates, lists all, and deletes users.
|
||||
|
||||
|
||||
Enable root user
|
||||
================
|
||||
|
||||
.. rest_method:: POST /v1.0/{accountId}/instances/{instanceId}/root
|
||||
|
||||
Enables the root user for a database instance and returns the root password.
|
||||
|
||||
This operation generates a root password for the root user and
|
||||
enables the root user to log in from any host.
|
||||
|
||||
Changes that you make as a root user can impact the database
|
||||
instance and API operations in unpredictable and detrimental ways.
|
||||
When you enable the root user, you accept the possibility that we
|
||||
cannot support your database instance. We might not be able to
|
||||
assist you if you change core MySQL settings. These changes can be,
|
||||
but are not limited to, turning off bin logs, removing users that
|
||||
we use to access your instance, and so on.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: createRoot.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-enable-root-user-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Show root-enabled status for database instance
|
||||
==============================================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/instances/{instanceId}/root
|
||||
|
||||
Shows root-enabled status for a database instance.
|
||||
|
||||
Returns ``true`` if root user is enabled for a database instance.
|
||||
Otherwise, returns ``false``.
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: isRootEnabled.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-check-root-user-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Disable root user
|
||||
=================
|
||||
|
||||
.. rest_method:: DELETE /v1.0/{accountId}/instances/{instanceId}/root
|
||||
|
||||
Disables the root user.
|
||||
|
||||
Error response codes:202,413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: disableRoot.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Delete user
|
||||
===========
|
||||
|
||||
.. rest_method:: DELETE /v1.0/{accountId}/instances/{instanceId}/users/{name}
|
||||
|
||||
Deletes a user for a database instance.
|
||||
|
||||
Do not use periods in user names. A bug in a Python library that
|
||||
Rackspace uses that can cause incorrect user deletions to occur if
|
||||
you use a period (.) in the user name. In this case, the bug in the
|
||||
library truncates the user name to the portion from the beginning
|
||||
up to the period. For example, for the ``my.userA`` user, the bug
|
||||
truncates the user name to ``my``, and if the ``user`` exists, that
|
||||
user is incorrectly deleted.
|
||||
|
||||
Error response codes:202,413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: deleteUser.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Create user
|
||||
===========
|
||||
|
||||
.. rest_method:: POST /v1.0/{accountId}/instances/{instanceId}/users
|
||||
|
||||
Creates a user for a database instance.
|
||||
|
||||
Asynchronously provisions a new user for the database instance by
|
||||
using the configuration that you define in the request object.
|
||||
After the API validates the request and starts progress on the
|
||||
provisioning process, the call returns the ``Accepted (202)``
|
||||
response code.
|
||||
|
||||
If the API cannot fulfill the corresponding request due to
|
||||
insufficient data or data that is not valid, the API returns the
|
||||
``Bad Request (400)`` response code with information about the
|
||||
nature of the failure. You cannot recover from validation errors.
|
||||
You must correct the cause of the failure and the request again.
|
||||
|
||||
This table lists the required attributes for creating users:
|
||||
|
||||
**Required attributes for user**
|
||||
|
||||
+-----------------+---------------------------------------------------------------------+------------------------------------+----------+
|
||||
| Applies to | Name | Description | Required |
|
||||
+-----------------+---------------------------------------------------------------------+------------------------------------+----------+
|
||||
| User | name | Name of the user for the database. | Yes |
|
||||
+-----------------+---------------------------------------------------------------------+------------------------------------+----------+
|
||||
| password | User password for database access. | Yes | |
|
||||
+-----------------+---------------------------------------------------------------------+------------------------------------+----------+
|
||||
| (database) name | Name of the database that the user can access. You must specify one | No | |
|
||||
| | or more database names. | | |
|
||||
+-----------------+---------------------------------------------------------------------+------------------------------------+----------+
|
||||
|
||||
**Notes**
|
||||
|
||||
- The operation grants the user all privileges on the databases.
|
||||
|
||||
- Do not use the ``root`` user name, which is reserved.
|
||||
|
||||
These tables list the valid characters for database names, user
|
||||
names, and passwords.
|
||||
|
||||
**Valid characters in database name, user name, and password**
|
||||
|
||||
+---------------------------------------------------------------------------------------------------------------------------------+
|
||||
| Character |
|
||||
+---------------------------------------------------------------------------------------------------------------------------------+
|
||||
| Letters (upper and lower cases allowed) |
|
||||
+---------------------------------------------------------------------------------------------------------------------------------+
|
||||
| Numbers |
|
||||
+---------------------------------------------------------------------------------------------------------------------------------+
|
||||
| ``@``, ``?``, ``#``, and spaces are allowed, but **not** at the beginning and end of the database name, user name, and password |
|
||||
+---------------------------------------------------------------------------------------------------------------------------------+
|
||||
| ``_`` is allowed anywhere in the database name, user name, and password |
|
||||
+---------------------------------------------------------------------------------------------------------------------------------+
|
||||
|
||||
**Characters that are not allowed in database name, user name, and password**
|
||||
|
||||
+---------------------------------------------------------+
|
||||
| Character |
|
||||
+---------------------------------------------------------+
|
||||
| Single quotes |
|
||||
+---------------------------------------------------------+
|
||||
| Double quotes |
|
||||
+---------------------------------------------------------+
|
||||
| Back quotes |
|
||||
+---------------------------------------------------------+
|
||||
| Semicolons |
|
||||
+---------------------------------------------------------+
|
||||
| Commas |
|
||||
+---------------------------------------------------------+
|
||||
| Back slashes |
|
||||
+---------------------------------------------------------+
|
||||
| Forward slashes |
|
||||
+---------------------------------------------------------+
|
||||
| Spaces at the front or end of the user name or password |
|
||||
+---------------------------------------------------------+
|
||||
|
||||
**Length restrictions for database name, user name, and password**
|
||||
|
||||
+------------------------------+-----------------------------+
|
||||
| Restriction | Value |
|
||||
+------------------------------+-----------------------------+
|
||||
| Database name maximum length | 64 |
|
||||
+------------------------------+-----------------------------+
|
||||
| User name maximum length | 16 |
|
||||
+------------------------------+-----------------------------+
|
||||
| Password maximum length | unlimited (no restrictions) |
|
||||
+------------------------------+-----------------------------+
|
||||
|
||||
Error response codes:202,413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: createUser.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: samples/db-create-users-request.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
List database instance users
|
||||
============================
|
||||
|
||||
.. rest_method:: GET /v1.0/{accountId}/instances/{instanceId}/users
|
||||
|
||||
Lists the users in a database instance and the associated databases for that user.
|
||||
|
||||
This operation does not return system users. A system user is a
|
||||
database administrator who administers the health of the database.
|
||||
Also, this operation returns the ``root`` user only if it is
|
||||
enabled.
|
||||
|
||||
The following notes apply to MySQL users:
|
||||
|
||||
- User names can be up to 16 characters long.
|
||||
|
||||
- When you create accounts with INSERT, you must use FLUSH
|
||||
PRIVILEGES to tell the server to reload the grant tables.
|
||||
|
||||
- For additional information, See:
|
||||
`http://dev.mysql.com/doc/refman/5.1/en/user-account-
|
||||
management.html <http://dev.mysql.com/doc/refman/5.1/en/user-
|
||||
account-management.html>`_
|
||||
|
||||
|
||||
Normal response codes: 200
|
||||
Error response codes:413,405,404,403,401,400,422,503,500,501,
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: listUsers.yaml
|
||||
|
||||
- instanceId: instanceId
|
||||
- accountId: accountId
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: samples/db-list-users-response.json
|
||||
:language: javascript
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,258 +0,0 @@
|
|||
# variables in header
|
||||
X-Auth-Token:
|
||||
description: |
|
||||
A valid authentication token for an
|
||||
administrative user.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
|
||||
# variables in path
|
||||
tenantId_1:
|
||||
description: |
|
||||
The tenant ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
tokenId:
|
||||
description: |
|
||||
The authentication token for which to perform the
|
||||
operation.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
userId:
|
||||
description: |
|
||||
The user ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
name_2:
|
||||
description: |
|
||||
Filters the response by a tenant name.
|
||||
in: query
|
||||
required: true
|
||||
type: string
|
||||
|
||||
# variables in body
|
||||
access:
|
||||
description: |
|
||||
An ``access`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description:
|
||||
description: |
|
||||
The description of the tenant. If not set, this
|
||||
value is ``null``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
email:
|
||||
description: |
|
||||
The user email.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
email_1:
|
||||
description: |
|
||||
The user email.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
enabled:
|
||||
description: |
|
||||
Indicates whether the tenant is enabled or
|
||||
disabled.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
enabled_1:
|
||||
description: |
|
||||
Indicates whether the user is enabled (``true``)
|
||||
or disabled (``false``). Default is ``true``.
|
||||
in: body
|
||||
required: false
|
||||
type: boolean
|
||||
enabled_2:
|
||||
description: |
|
||||
Indicates whether the user is enabled (``true``)
|
||||
or disabled(``false``). The default value is ``true``.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
endpoints:
|
||||
description: |
|
||||
One or more ``endpoints`` objects. Each object
|
||||
shows the ``adminURL``, ``region``, ``internalURL``, ``id``, and
|
||||
``publicURL`` for the endpoint.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
endpoints_links:
|
||||
description: |
|
||||
Links for the endpoint.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
expires:
|
||||
description: |
|
||||
The date and time when the token expires.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC. In the previous example, the offset value is ``-05:00``.
|
||||
|
||||
A ``null`` value indicates that the token never expires.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id:
|
||||
description: |
|
||||
The ID of the trust.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
id_1:
|
||||
description: |
|
||||
The user ID.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
impersonation:
|
||||
description: |
|
||||
The impersonation flag.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
issued_at:
|
||||
description: |
|
||||
The date and time when the token was issued.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC. In the previous example, the offset value is ``-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
location:
|
||||
format: uri
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
metadata:
|
||||
description: |
|
||||
A ``metadata`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name:
|
||||
description: |
|
||||
Endpoint name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_1:
|
||||
description: |
|
||||
The user name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
password:
|
||||
description: |
|
||||
The user password.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
serviceCatalog:
|
||||
description: |
|
||||
A ``serviceCatalog`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
tenant:
|
||||
description: |
|
||||
A ``tenant`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
tenantId:
|
||||
description: |
|
||||
The tenant ID.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
token:
|
||||
description: |
|
||||
A ``token`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
trust:
|
||||
description: |
|
||||
A ``trust`` object.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
trustee_user_id:
|
||||
description: |
|
||||
The trustee user ID.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
trustor_user_id:
|
||||
description: |
|
||||
The trustor user ID.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
type:
|
||||
description: |
|
||||
Endpoint type.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
user:
|
||||
description: |
|
||||
A ``user`` object, which shows the ``username``,
|
||||
``roles_links``, ``id``, ``roles``, and ``name``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
username:
|
||||
description: |
|
||||
The user name of the user.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
username_1:
|
||||
description: |
|
||||
The username of user.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
users:
|
||||
description: |
|
||||
One or more ``user`` objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
|
|
@ -1,167 +0,0 @@
|
|||
# variables in header
|
||||
Location:
|
||||
format: uri
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
Location_1:
|
||||
description: |
|
||||
The location.
|
||||
format: uri
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in path
|
||||
endpointTemplateId:
|
||||
description: |
|
||||
The endpoint template ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
roleId:
|
||||
description: |
|
||||
The role ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
serviceId:
|
||||
description: |
|
||||
The service ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
tenantId:
|
||||
description: |
|
||||
The tenant ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
type_1:
|
||||
description: |
|
||||
The credential type.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
userId:
|
||||
description: |
|
||||
The user ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
{}
|
||||
|
||||
# variables in body
|
||||
description:
|
||||
description: |
|
||||
Description about the service.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description_1:
|
||||
description: |
|
||||
Description about the tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description_2:
|
||||
description: |
|
||||
The role description.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
email:
|
||||
description: |
|
||||
The user email.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
enabled:
|
||||
description: |
|
||||
Indicates whether the tenant is enabled or
|
||||
disabled. Default is ``true``.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
enabled_1:
|
||||
description: |
|
||||
Indicates whether the user is enabled (``true``)
|
||||
or disabled(``false``). The default value is ``true``.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
id:
|
||||
description: |
|
||||
The UUID of the service.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_1:
|
||||
description: |
|
||||
The tenant ID.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_2:
|
||||
description: |
|
||||
The user ID.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_3:
|
||||
description: |
|
||||
The role ID.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
name:
|
||||
description: |
|
||||
The service name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_1:
|
||||
description: |
|
||||
The tenant name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_2:
|
||||
description: |
|
||||
The user name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_3:
|
||||
description: |
|
||||
The role name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
roles:
|
||||
description: |
|
||||
A ``roles`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
roles_links:
|
||||
description: |
|
||||
Role links.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
type:
|
||||
description: |
|
||||
The type of the service.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
users:
|
||||
description: |
|
||||
The ``users`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
|
|
@ -1,392 +0,0 @@
|
|||
# variables in header
|
||||
{}
|
||||
|
||||
# variables in path
|
||||
access_token_id:
|
||||
description: |
|
||||
The UUID of the access token.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
consumer_id:
|
||||
description: |
|
||||
The UUID of the consumer.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
domain_id:
|
||||
description: |
|
||||
The UUID of the domain.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
endpoint_id:
|
||||
description: |
|
||||
The endpoint ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
group_id:
|
||||
description: |
|
||||
The UUID of the group.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
name:
|
||||
description: |
|
||||
The name of the group.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
policy_id:
|
||||
description: |
|
||||
The policy ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
project_id:
|
||||
description: |
|
||||
The UUID of the project.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
region_id:
|
||||
description: |
|
||||
The region ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
role_id:
|
||||
description: |
|
||||
The UUID of the role.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
service_id:
|
||||
description: |
|
||||
The service ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
user_id:
|
||||
description: |
|
||||
The UUID of the user.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
{}
|
||||
|
||||
# variables in body
|
||||
blob:
|
||||
description: |
|
||||
The policy rule itself, as a serialized blob.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
endpoints:
|
||||
description: |
|
||||
An ``endpoints`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
generation:
|
||||
description: |
|
||||
A unique integer value that identifies the key.
|
||||
The generation value changes only if you set a new key. If the
|
||||
request sets the key to the same value that already exists, the
|
||||
response shows the existing generation value, which makes the
|
||||
request idempotent.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
id:
|
||||
description: |
|
||||
The ID of the trust.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_1:
|
||||
description: |
|
||||
The endpoint UUID.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_2:
|
||||
description: |
|
||||
The ID of the policy.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
impersonation:
|
||||
description: |
|
||||
The impersonation flag. Default is false.
|
||||
in: body
|
||||
required: false
|
||||
type: boolean
|
||||
interface:
|
||||
description: |
|
||||
The interface type, which describes the
|
||||
visibility of the endpoint. Value is: - ``public``. Visible by
|
||||
end users on a publicly available network interface. -
|
||||
``internal``. Visible by end users on an unmetered internal
|
||||
network interface. - ``admin``. Visible by administrative users
|
||||
on a secure network interface.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
links:
|
||||
description: |
|
||||
Trust links.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
links_1:
|
||||
description: |
|
||||
The links for the ``endpoints`` resource.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
links_2:
|
||||
description: |
|
||||
The links for the ``policy`` resource.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
metadata:
|
||||
description: |
|
||||
A Base64-encoded JSON object that contains these key and value
|
||||
pairs:
|
||||
|
||||
- ``source``. The identity who is requesting a ticket.
|
||||
|
||||
- ``destination``. The target for which the ticket will be valid.
|
||||
|
||||
- ``timestamp``. The current time stamp from the requester.
|
||||
|
||||
- ``nonce``. Random, single-use data. See `Cryptographic nonce
|
||||
<https://en.wikipedia.org/wiki/Cryptographic_nonce>`_.
|
||||
|
||||
The time stamp and nonce are required to prevent replay attacks.
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"source": "scheduler.host.example.com",
|
||||
"destination": "compute.host.example.com",
|
||||
"timestamp": "2012-03-26T10:01:01.720000",
|
||||
"nonce": 1234567890
|
||||
}
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
name_1:
|
||||
description: |
|
||||
The role name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_2:
|
||||
description: |
|
||||
The name of the group.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
next:
|
||||
description: |
|
||||
The ``next`` relative link for the ``endpoints``
|
||||
resource.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
oauth_expires_at:
|
||||
description: |
|
||||
The date and time when a request token expires.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC.
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
If the Identity API does not include this attribute or its value is
|
||||
``null``, the token never expires.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
policy:
|
||||
description: |
|
||||
A ``policy`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
previous:
|
||||
description: |
|
||||
The ``previous`` relative link for the
|
||||
``endpoints`` resource.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
project_id_1:
|
||||
description: |
|
||||
The ID of the project.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
region:
|
||||
description: |
|
||||
(Deprecated in v3.2) The geographic location of
|
||||
the service endpoint.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
remaining_uses:
|
||||
description: |
|
||||
Remaining uses flag. Default is null.
|
||||
in: body
|
||||
required: false
|
||||
type: boolean
|
||||
roles:
|
||||
description: |
|
||||
A roles object.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
roles_links:
|
||||
description: |
|
||||
A roles links object. Includes ``next``,
|
||||
``previous``, and ``self`` links for roles.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
self:
|
||||
description: |
|
||||
The ``self`` relative link for the ``endpoints``
|
||||
resource.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
service_id_1:
|
||||
description: |
|
||||
The UUID of the service to which the endpoint
|
||||
belongs.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
signature:
|
||||
description: |
|
||||
A Base64-encoded HMAC signature over the
|
||||
Base64-encoded request metadata object. For example:
|
||||
``Base64encode(HMAC(SigningKey, RequestMetadata))`` The long-term
|
||||
key of the requester is used for the signature. When the request
|
||||
is received, the KDS must verify the signature. To do so, the KDS
|
||||
must access the ``source`` from the request metadata to look up
|
||||
the associated long-term key to use to verify the signature. The
|
||||
KDS should not access any other data contained in the request
|
||||
metadata before verifying the signature. If the KDS fails to
|
||||
verify the signature, it risks issuing a ticket to a party who is
|
||||
impersonating the source.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
ticket:
|
||||
description: |
|
||||
The ticket is encrypted with the long-term key of the source and
|
||||
contains a Base64-encoded JSON object containing the following key
|
||||
and value pairs:
|
||||
|
||||
- ``skey``. The newly-generated Base64-encoded message signing key.
|
||||
|
||||
- ``ekey``. The newly-generated Base64-encoded message encryption
|
||||
key.
|
||||
|
||||
- ``esek``. Encrypted signing and encryption key pair for the
|
||||
receiver.
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"skey": "ZjhkuYZH8y87rzhgi7...",
|
||||
"ekey": "Fk8yksa8z8zKtakc8s...",
|
||||
"esek": "KBo8fajfo8ysad5hq2..."
|
||||
}
|
||||
|
||||
The long-term key of the destination is used to encrypt the
|
||||
``esek`` value. The ``esek`` value contains a Base64-encoded JSON
|
||||
object that contains the following key and value pairs:
|
||||
|
||||
- ``key``. The Base64-encoded random key that is used to generate
|
||||
the signing and encryption keys.
|
||||
|
||||
- ``timestamp``. The time stamp when the key was created.
|
||||
|
||||
- ``ttl``. An integer value that specifies the validity length of
|
||||
the key, in seconds.
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"key": "Afa8sad2hgsd7asv7ad...",
|
||||
"timestamp": "2012-03-26T10:01:01.720000",
|
||||
"ttl": 28800
|
||||
}
|
||||
|
||||
The ``key`` and ``timestamp`` values are used as inputs to the HKDF
|
||||
``expand`` function to generate the signing and encryption keys, as
|
||||
described in the overview on this page.
|
||||
|
||||
The ``timestamp`` and ``ttl`` values must equal the ``expiration``
|
||||
time stamp value that is contained in the response metadata.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
trust:
|
||||
description: |
|
||||
A trust object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
trustee_user_id:
|
||||
description: |
|
||||
The trustee user ID.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
trustor_user_id:
|
||||
description: |
|
||||
The trustor user ID.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
type:
|
||||
description: |
|
||||
The MIME media type of the serialized policy
|
||||
blob. From the perspective of the Identity API, a policy blob can
|
||||
be based on any technology. In OpenStack, the ``policy.json`` blob
|
||||
(``type="application/json"``) is the conventional solution.
|
||||
However, you might want to use an alternative policy engine that
|
||||
uses a different policy language type. For example,
|
||||
``type="application/xacml+xml"``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
url:
|
||||
description: |
|
||||
The endpoint URL.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
|
|
@ -1,271 +0,0 @@
|
|||
# variables in header
|
||||
x-openstack-request-id:
|
||||
description: |
|
||||
A unique request ID that provides tracking for
|
||||
the request. Provider must configure middleware to return a
|
||||
request ID header in a response.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in path
|
||||
alias_1:
|
||||
description: |
|
||||
An alias for the extension name. For example,
|
||||
``os-server-external- events``.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
{}
|
||||
|
||||
# variables in body
|
||||
access:
|
||||
description: |
|
||||
An ``access`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
alias:
|
||||
description: |
|
||||
The alias for the extension. For example,
|
||||
"FOXNSOX", "os- availability-zone", "os-extended-quotas", "os-
|
||||
share-unmanage" or "os-used-limits."
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description:
|
||||
description: |
|
||||
The description of the tenant. If not set, this
|
||||
value is ``null``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description_1:
|
||||
description: |
|
||||
The extension description.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
description_2:
|
||||
description: |
|
||||
Description about the tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
enabled:
|
||||
description: |
|
||||
Indicates whether the tenant is enabled or
|
||||
disabled.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
endpoints:
|
||||
description: |
|
||||
One or more ``endpoints`` objects. Each object
|
||||
shows the ``adminURL``, ``region``, ``internalURL``, ``id``, and
|
||||
``publicURL`` for the endpoint.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
endpoints_links:
|
||||
description: |
|
||||
Links for the endpoint.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
expires:
|
||||
description: |
|
||||
The date and time when the token expires.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC. In the previous example, the offset value is ``-05:00``.
|
||||
|
||||
A ``null`` value indicates that the token never expires.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id:
|
||||
description: |
|
||||
The token ID. This field is required in the
|
||||
``token`` object.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
id_1:
|
||||
description: |
|
||||
The tenant ID.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
impersonation:
|
||||
description: |
|
||||
The impersonation flag.
|
||||
in: body
|
||||
required: false
|
||||
type: boolean
|
||||
issued_at:
|
||||
description: |
|
||||
The date and time when the token was issued.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC. In the previous example, the offset value is ``-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
metadata:
|
||||
description: |
|
||||
A ``metadata`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
name:
|
||||
description: |
|
||||
Endpoint name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_1:
|
||||
description: |
|
||||
The name of the extension. For example, "Fox In
|
||||
Socks."
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_2:
|
||||
description: |
|
||||
The tenant name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
password:
|
||||
description: |
|
||||
The password of the user. Required if you include
|
||||
the ``passwordCredentials`` object. Otherwise, you must provide a
|
||||
token.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
passwordCredentials:
|
||||
description: |
|
||||
A ``passwordCredentials`` object. To
|
||||
authenticate, you must provide either a user ID and password or a
|
||||
token.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
serviceCatalog:
|
||||
description: |
|
||||
List of ``serviceCatalog`` objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
tenant:
|
||||
description: |
|
||||
A ``tenant`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
tenantId:
|
||||
description: |
|
||||
The tenant ID. Both the ``tenantId`` and
|
||||
``tenantName`` attributes are optional and mutually exclusive. If
|
||||
you specify both attributes, the server returns the ``Bad Request
|
||||
(400)`` response code.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
tenantName:
|
||||
description: |
|
||||
The tenant name. Both the ``tenantId`` and
|
||||
``tenantName`` attributes are optional and mutually exclusive. If
|
||||
you specify both attributes, the server returns the ``Bad Request
|
||||
(400)`` response code.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
tenants:
|
||||
description: |
|
||||
One or more tenant Objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
tenants_links:
|
||||
description: |
|
||||
Links of the tenants.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
token:
|
||||
description: |
|
||||
A ``token`` object. Required if you do not
|
||||
provide a password credential.
|
||||
in: body
|
||||
required: false
|
||||
type: object
|
||||
trust:
|
||||
description: |
|
||||
A ``trust`` object.
|
||||
in: body
|
||||
required: false
|
||||
type: object
|
||||
trustee_user_id:
|
||||
description: |
|
||||
The trustee user ID.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
trustor_user_id:
|
||||
description: |
|
||||
The trustor user ID.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
type:
|
||||
description: |
|
||||
Endpoint type.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
updated:
|
||||
description: |
|
||||
The date and time stamp when the extension was
|
||||
last updated.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
user:
|
||||
description: |
|
||||
A ``user`` object, which shows the ``username``,
|
||||
``roles_links``, ``id``, ``roles``, and ``name``.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
username:
|
||||
description: |
|
||||
The user name. Required if you include the
|
||||
``passwordCredentials`` object. Otherwise, you must provide a
|
||||
token.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
|
File diff suppressed because it is too large
Load Diff
|
@ -1,188 +0,0 @@
|
|||
# variables in header
|
||||
{}
|
||||
|
||||
# variables in path
|
||||
image_id:
|
||||
description: |
|
||||
Image ID stored through the image API. Typically
|
||||
a UUID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
owner_id:
|
||||
description: |
|
||||
Owner ID, which is the tenant ID.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
changes-since:
|
||||
description: |
|
||||
Filters the image list to those images that have
|
||||
changed since a time stamp value.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
container_format_1:
|
||||
description: |
|
||||
Filters the image list by a container format. A
|
||||
valid value is ``aki``, ``ami``, ``ari``, ``bare``, ``docker``,
|
||||
``ova``, or ``ovf``.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
disk_format_1:
|
||||
description: |
|
||||
Filters the image list by a disk format. A valid
|
||||
value is ``aki``, ``ami``, ``ari``, ``iso``, ``qcow2``, ``raw``,
|
||||
``vhd``, ``vdi``, or ``vmdk``.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
name_1:
|
||||
description: |
|
||||
Filters the image list by an image name, in
|
||||
string format.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
size_max:
|
||||
description: |
|
||||
Filters the image list by a maximum image size,
|
||||
in bytes.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
size_min:
|
||||
description: |
|
||||
Filters the image list by a minimum image size,
|
||||
in bytes.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
status:
|
||||
description: |
|
||||
Filters the image list by a status. A valid value
|
||||
is ``queued``, ``saving``, ``active``, ``killed``, ``deleted``, or
|
||||
``pending_delete``.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in body
|
||||
can_share:
|
||||
description: |
|
||||
Indicates whether the owner is authorized to
|
||||
share the image. If the owner can share the image, this value is
|
||||
``true``. Otherwise, this value is ``false``. Specify the owner
|
||||
ID, which is the tenant ID, is in the request URI.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
container_format:
|
||||
description: |
|
||||
A container format defines the file format of the
|
||||
file that contains the image and metadata about the actual VM.
|
||||
For a VM image with a ``bare`` container format, the image is a
|
||||
blob of unstructured data. You can set the container format to
|
||||
one of these values: - ``aki`` Amazon kernel image. - ``ami``
|
||||
Amazon machine image. - ``ari`` Amazon ramdisk image. -
|
||||
``bare`` No container or metadata envelope for the image. -
|
||||
``docker`` Docker tar archive of the container filesystem. -
|
||||
``ova`` OVA container format. - ``ovf`` OVF container
|
||||
format.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
createImage:
|
||||
description: |
|
||||
Local file path where the image is stored.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
disk_format:
|
||||
description: |
|
||||
The disk format of a VM image is the format of
|
||||
the underlying disk image. Virtual appliance vendors have
|
||||
different formats for laying out the information contained in a VM
|
||||
disk image. You can set the disk format for your image to one of
|
||||
these values: - ``aki`` An Amazon kernel image. - ``ami``
|
||||
An Amazon machine image. - ``ari`` An Amazon ramdisk image. -
|
||||
``iso`` An archive format for the data contents of an optical
|
||||
disc, such as CDROM. - ``qcow2`` Supported by the QEMU
|
||||
emulator that can expand dynamically and supports Copy on Write.
|
||||
- ``raw`` Unstructured disk image format. - ``vhd`` VHD
|
||||
disk format, a common disk format used by VM monitors from
|
||||
VMWare, Xen, Microsoft, VirtualBox, and others. - ``vdi``
|
||||
Supported by VirtualBox VM monitor and the QEMU emulator. -
|
||||
``vmdk`` A common disk format that supported by many VM
|
||||
monitors.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
images:
|
||||
description: |
|
||||
A list of ``image`` objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
location:
|
||||
description: |
|
||||
A URI location for the image.
|
||||
format: uri
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
location_1:
|
||||
format: uri
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
member_id:
|
||||
description: |
|
||||
The UUID of the member with which an image is
|
||||
shared.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
memberships:
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name:
|
||||
description: |
|
||||
Name for the image. Note that the name of an
|
||||
image is not unique to an Image service node. The API cannot
|
||||
expect users to know the names of images that other users own.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
next:
|
||||
description: |
|
||||
Show the next item in the list.
|
||||
format: uri
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
previous:
|
||||
description: |
|
||||
Show the previous item in the list.
|
||||
format: uri
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
size:
|
||||
description: |
|
||||
The size of the image, in bytes.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
uri:
|
||||
description: |
|
||||
The exact location needed to get the metadata for
|
||||
the image.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
@ -1,430 +0,0 @@
|
|||
# variables in header
|
||||
{}
|
||||
|
||||
# variables in path
|
||||
network_id_1:
|
||||
description: |
|
||||
The UUID of the network.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
port_id:
|
||||
description: |
|
||||
The UUID of the port.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
subnet_id_2:
|
||||
description: |
|
||||
The UUID of the subnet.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
{}
|
||||
|
||||
# variables in body
|
||||
admin_state_up:
|
||||
description: |
|
||||
The administrative state of the network, which is
|
||||
up (``true``) or down (``false``).
|
||||
in: body
|
||||
required: false
|
||||
type: boolean
|
||||
admin_state_up_1:
|
||||
description: |
|
||||
The administrative status of the port, which is
|
||||
up (``true``) or down (``false``).
|
||||
in: body
|
||||
required: false
|
||||
type: boolean
|
||||
admin_state_up_2:
|
||||
description: |
|
||||
The administrative state of the network, which is
|
||||
up (``true``) or down (``false``).
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
admin_state_up_3:
|
||||
description: |
|
||||
The administrative state of the port, which is up
|
||||
(``true``) or down (``false``).
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
allowed_address_pairs:
|
||||
description: |
|
||||
A set of zero or more allowed address pairs. An
|
||||
address pair contains an IP address and MAC address.
|
||||
in: body
|
||||
required: false
|
||||
type: array
|
||||
allowed_address_pairs_1:
|
||||
description: |
|
||||
A set of zero or more allowed address pairs. An
|
||||
address pair consists of an IP address and MAC address.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
availability_zone_hints:
|
||||
description: |
|
||||
The availability zone candidate for the network.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
availability_zones:
|
||||
description: |
|
||||
The availability zone for the network.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
changed_at:
|
||||
description: |
|
||||
Time at which the network has been created.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
created_at:
|
||||
description: |
|
||||
Time at which port has been created.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
default:
|
||||
description: |
|
||||
Defines whether the provider is the default for
|
||||
the service type. If this value is ``true``, the provider is the
|
||||
default. If this value is ``false``, the provider is not the
|
||||
default.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
device_id:
|
||||
description: |
|
||||
The UUID of the device that uses this port. For
|
||||
example, a virtual server.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
device_id_1:
|
||||
description: |
|
||||
The UUID of the device that uses this port. For
|
||||
example, a virtual server.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
device_owner:
|
||||
description: |
|
||||
The UUID of the entity that uses this port. For
|
||||
example, a DHCP agent.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
device_owner_1:
|
||||
description: |
|
||||
The UUID of the entity that uses this port. For
|
||||
example, a DHCP agent.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
extra_dhcp_opts:
|
||||
description: |
|
||||
A set of zero or more extra DHCP option pairs. An
|
||||
option pair consists of an option value and name.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
fixed_ips:
|
||||
description: |
|
||||
If you specify only a subnet UUID, OpenStack
|
||||
Networking allocates an available IP from that subnet to the port.
|
||||
If you specify both a subnet UUID and an IP address, OpenStack
|
||||
Networking tries to allocate the address to the port.
|
||||
in: body
|
||||
required: false
|
||||
type: array
|
||||
fixed_ips_1:
|
||||
description: |
|
||||
The IP addresses for the port. Includes the IP
|
||||
address and UUID of the subnet.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
id:
|
||||
description: |
|
||||
The UUID of the network.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id_1:
|
||||
description: |
|
||||
The UUID of the port.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
ip_address:
|
||||
description: |
|
||||
The IP address of an allowed address pair.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
ip_address_1:
|
||||
description: |
|
||||
The fixed IP address of the port.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
location:
|
||||
description: |
|
||||
Full URL to a service or server.
|
||||
format: uri
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
mac_address:
|
||||
description: |
|
||||
The MAC address of an allowed address pair.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
mac_address_1:
|
||||
description: |
|
||||
The MAC address.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
mtu:
|
||||
description: |
|
||||
The MTU of a network resource.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
name:
|
||||
description: |
|
||||
The network name.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
name_1:
|
||||
description: |
|
||||
A symbolic name for the port.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
name_2:
|
||||
description: |
|
||||
The network name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_3:
|
||||
description: |
|
||||
The port name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_4:
|
||||
description: |
|
||||
User-facing provider name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
network:
|
||||
description: |
|
||||
A ``network`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
network_id:
|
||||
description: |
|
||||
The UUID of the network.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
network_id_2:
|
||||
description: |
|
||||
The UUID of the attached network.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
networks:
|
||||
description: |
|
||||
A list of ``network`` objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
opt_name:
|
||||
description: |
|
||||
The extra DHCP option name.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
opt_name_1:
|
||||
description: |
|
||||
The extra DHCP option name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
opt_value:
|
||||
description: |
|
||||
The extra DHCP option value.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
opt_value_1:
|
||||
description: |
|
||||
The extra DHCP option value.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
port:
|
||||
description: |
|
||||
A ``port`` object.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
port_security_enabled:
|
||||
description: |
|
||||
The port security status. A valid value is
|
||||
enabled (``true``) or disabled (``false``).
|
||||
in: body
|
||||
required: false
|
||||
type: boolean
|
||||
port_security_enabled_1:
|
||||
description: |
|
||||
The port security status. The status is enabled
|
||||
(``true``) or disabled (``false``).
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
ports:
|
||||
description: |
|
||||
A list of ``port`` objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
router:external:
|
||||
description: |
|
||||
Indicates whether this network is externally
|
||||
accessible.
|
||||
in: body
|
||||
required: false
|
||||
type: boolean
|
||||
router:external_1:
|
||||
description: |
|
||||
Indicates whether this network is externally
|
||||
accessible.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
security_groups:
|
||||
description: |
|
||||
One or more security group UUIDs.
|
||||
in: body
|
||||
required: false
|
||||
type: array
|
||||
security_groups_1:
|
||||
description: |
|
||||
The UUIDs of any attached security groups.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
service_providers:
|
||||
description: |
|
||||
A list of ``service_provider`` objects.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
service_type:
|
||||
description: |
|
||||
The service type, which is ``CORE``, ``DUMMY``,
|
||||
``FIREWALL``, ``FLAVORS``, ``L3_ROUTER_NAT``, ``LOADBALANCER``,
|
||||
``LOADBALANCERV2``, ``METERING``, ``QOS``, or ``VPN``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
shared:
|
||||
description: |
|
||||
Admin-only. Indicates whether this network is
|
||||
shared across all tenants.
|
||||
in: body
|
||||
required: false
|
||||
type: boolean
|
||||
shared_1:
|
||||
description: |
|
||||
Indicates whether this network is shared across
|
||||
all tenants. By default, only administrative users can change this
|
||||
value.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
status:
|
||||
description: |
|
||||
The network status.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
status_1:
|
||||
description: |
|
||||
The port status. Value is ``ACTIVE`` or ``DOWN``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
subnet_id:
|
||||
description: |
|
||||
If you specify only a subnet UUID, OpenStack
|
||||
Networking allocates an available IP from that subnet to the port.
|
||||
If you specify both a subnet UUID and an IP address, OpenStack
|
||||
Networking tries to allocate the address to the port.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
subnet_id_1:
|
||||
description: |
|
||||
The UUID of the subnet to which the port is
|
||||
attached.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
subnets:
|
||||
description: |
|
||||
The associated subnets.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
tenant_id:
|
||||
description: |
|
||||
The UUID of the tenant who owns the network. Only
|
||||
administrative users can specify a tenant UUID other than their
|
||||
own. You cannot change this value through authorization policies.
|
||||
in: body
|
||||
required: false
|
||||
type: string
|
||||
tenant_id_1:
|
||||
description: |
|
||||
The UUID of the tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
tenant_id_2:
|
||||
description: |
|
||||
The UUID of the tenant who owns the network. Only
|
||||
administrative users can specify a tenant UUID other than their
|
||||
own.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
updated_at:
|
||||
description: |
|
||||
Time at which port has been updated.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
updated_at_1:
|
||||
description: |
|
||||
Time at which the network has been updated.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
|
|
@ -1,972 +0,0 @@
|
|||
# variables in header
|
||||
Accept:
|
||||
description: |
|
||||
Instead of using the ``format`` query parameter,
|
||||
set this header to ``application/json``, ``application/xml``, or
|
||||
``text/xml``.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
Accept-Ranges:
|
||||
description: |
|
||||
The type of ranges that the object accepts.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
Content-Disposition:
|
||||
description: |
|
||||
If set, specifies the override behavior for the
|
||||
browser. For example, this header might specify that the browser
|
||||
use a download program to save this file rather than show the
|
||||
file, which is the default.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
Content-Disposition_1:
|
||||
description: |
|
||||
If set, specifies the override behavior for the
|
||||
browser. For example, this header might specify that the browser
|
||||
use a download program to save this file rather than show the
|
||||
file, which is the default. If not set, this header is not
|
||||
returned by this operation.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
Content-Encoding:
|
||||
description: |
|
||||
If set, the value of the ``Content-Encoding``
|
||||
metadata.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
Content-Encoding_1:
|
||||
description: |
|
||||
If set, the value of the ``Content-Encoding``
|
||||
metadata. If not set, the operation does not return this header.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
Content-Length:
|
||||
description: |
|
||||
If the operation succeeds, this value is zero
|
||||
(0). If the operation fails, this value is the length of the error
|
||||
text in the response body.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
Content-Length_1:
|
||||
description: |
|
||||
Set to the length of the object content. Do not
|
||||
set if chunked transfer encoding is being used.
|
||||
in: header
|
||||
required: false
|
||||
type: integer
|
||||
Content-Length_2:
|
||||
description: |
|
||||
The length of the response body that contains the
|
||||
list of names. If the operation fails, this value is the length of
|
||||
the error text in the response body.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
Content-Length_3:
|
||||
description: |
|
||||
HEAD operations do not return content. The
|
||||
``Content-Length`` header value is not the size of the response
|
||||
body but is the size of the object, in bytes.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
Content-Length_4:
|
||||
description: |
|
||||
The length of the object content in the response
|
||||
body, in bytes.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
Content-Type:
|
||||
description: |
|
||||
Changes the MIME type for the object.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
Content-Type_1:
|
||||
description: |
|
||||
If the operation fails, this value is the MIME
|
||||
type of the error text in the response body.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
Content-Type_2:
|
||||
description: |
|
||||
The MIME type of the object.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
Content-Type_3:
|
||||
description: |
|
||||
The MIME type of the list of names. If the
|
||||
operation fails, this value is the MIME type of the error text in
|
||||
the response body.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
Date:
|
||||
description: |
|
||||
The transaction date and time.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC. In the previous example, the offset value is ``-05:00``.
|
||||
|
||||
A ``null`` value indicates that the token never expires.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
Destination:
|
||||
description: |
|
||||
The container and object name of the destination
|
||||
object in the form of ``/container/object``. You must UTF-8-encode
|
||||
and then URL-encode the names of the destination container and
|
||||
object before you include them in this header.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
ETag:
|
||||
description: |
|
||||
The MD5 checksum of the copied object content.
|
||||
The value is not quoted.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
ETag_1:
|
||||
description: |
|
||||
The MD5 checksum value of the request body. For
|
||||
example, the MD5 checksum value of the object content. You are
|
||||
strongly recommended to compute the MD5 checksum value of object
|
||||
content and include it in the request. This enables the Object
|
||||
Storage API to check the integrity of the upload. The value is not
|
||||
quoted.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
ETag_2:
|
||||
description: |
|
||||
For objects smaller than 5 GB, this value is the
|
||||
MD5 checksum of the object content. The value is not quoted. For
|
||||
manifest objects, this value is the MD5 checksum of the
|
||||
concatenated string of MD5 checksums and ETags for each of the
|
||||
segments in the manifest, and not the MD5 checksum of the content
|
||||
that was downloaded. Also the value is enclosed in double-quote
|
||||
characters. You are strongly recommended to compute the MD5
|
||||
checksum of the response body as it is received and compare this
|
||||
value with the one in the ETag header. If they differ, the content
|
||||
was corrupted, so retry the operation.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
If-Match:
|
||||
description: |
|
||||
See `Request for Comments: 2616
|
||||
<http://www.ietf.org/rfc/rfc2616.txt>`_.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
If-Modified-Since:
|
||||
description: |
|
||||
See `Request for Comments: 2616
|
||||
<http://www.ietf.org/rfc/rfc2616.txt>`_.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
If-None-Match:
|
||||
description: |
|
||||
In combination with ``Expect: 100-Continue``,
|
||||
specify an ``"If- None-Match: *"`` header to query whether the
|
||||
server already has a copy of the object before any data is sent.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
If-Unmodified-Since:
|
||||
description: |
|
||||
See `Request for Comments: 2616
|
||||
<http://www.ietf.org/rfc/rfc2616.txt>`_.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
Last-Modified:
|
||||
description: |
|
||||
The date and time when the object was created or its metadata was
|
||||
changed.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC. In the previous example, the offset value is ``-05:00``.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
Range:
|
||||
description: |
|
||||
The ranges of content to get. You can use the
|
||||
``Range`` header to get portions of data by using one or more
|
||||
range specifications. To specify many ranges, separate the range
|
||||
specifications with a comma. The types of range specifications
|
||||
are: - **Byte range specification**. Use FIRST_BYTE_OFFSET to
|
||||
specify the start of the data range, and LAST_BYTE_OFFSET to
|
||||
specify the end. You can omit the LAST_BYTE_OFFSET and if you
|
||||
do, the value defaults to the offset of the last byte of data.
|
||||
- **Suffix byte range specification**. Use LENGTH bytes to specify
|
||||
the length of the data range. The following forms of the header
|
||||
specify the following ranges of data: - ``Range: bytes=-5``. The
|
||||
last five bytes. - ``Range: bytes=10-15``. The five bytes of data
|
||||
after a 10-byte offset. - ``Range: bytes=10-15,-5``. A multi-
|
||||
part response that contains the last five bytes and the five
|
||||
bytes of data after a 10-byte offset. The ``Content-Type``
|
||||
response header contains ``multipart/byteranges``. - ``Range:
|
||||
bytes=4-6``. Bytes 4 to 6 inclusive. - ``Range: bytes=2-2``. Byte
|
||||
2, the third byte of the data. - ``Range: bytes=6-``. Byte 6 and
|
||||
after. - ``Range: bytes=1-3,2-5``. A multi-part response that
|
||||
contains bytes 1 to 3 inclusive, and bytes 2 to 5 inclusive. The
|
||||
``Content-Type`` response header contains
|
||||
``multipart/byteranges``.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
Transfer-Encoding:
|
||||
description: |
|
||||
Set to ``chunked`` to enable chunked transfer
|
||||
encoding. If used, do not set the ``Content-Length`` header to a
|
||||
non-zero value.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Account-Bytes-Used:
|
||||
description: |
|
||||
The total number of bytes that are stored in
|
||||
Object Storage for the account.
|
||||
in: header
|
||||
required: true
|
||||
type: integer
|
||||
X-Account-Container-Count:
|
||||
description: |
|
||||
The number of containers.
|
||||
in: header
|
||||
required: true
|
||||
type: integer
|
||||
X-Account-Meta-Temp-URL-Key:
|
||||
description: |
|
||||
The secret key value for temporary URLs. If not
|
||||
set, this header is not returned in the response.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Account-Meta-Temp-URL-Key-2:
|
||||
description: |
|
||||
A second secret key value for temporary URLs. If
|
||||
not set, this header is not returned in the response.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Account-Meta-Temp-URL-Key-2_1:
|
||||
description: |
|
||||
A second secret key value for temporary URLs. The
|
||||
second key enables you to rotate keys by having two active keys at
|
||||
the same time.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Account-Meta-Temp-URL-Key_1:
|
||||
description: |
|
||||
The secret key value for temporary URLs.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Account-Meta-name:
|
||||
description: |
|
||||
The custom account metadata item, where
|
||||
``{name}`` is the name of the metadata item. One ``X-Account-
|
||||
Meta- {name}`` response header appears for each metadata item (for
|
||||
each ``{name}``).
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Account-Meta-name_1:
|
||||
description: |
|
||||
The account metadata. The ``{name}`` is the name
|
||||
of metadata item that you want to add, update, or delete. To
|
||||
delete this item, send an empty value in this header. You must
|
||||
specify an ``X-Account-Meta- {name}`` header for each metadata
|
||||
item (for each ``{name}``) that you want to add, update, or
|
||||
delete.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Account-Object-Count:
|
||||
description: |
|
||||
The number of objects in the account.
|
||||
in: header
|
||||
required: true
|
||||
type: integer
|
||||
X-Auth-Token:
|
||||
description: |
|
||||
Authentication token. If you omit this header,
|
||||
your request fails unless the account owner has granted you access
|
||||
through an access control list (ACL).
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Auth-Token_1:
|
||||
description: |
|
||||
Authentication token.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
X-Container-Bytes-Used:
|
||||
description: |
|
||||
The total number of bytes used.
|
||||
in: header
|
||||
required: true
|
||||
type: integer
|
||||
X-Container-Meta-Access-Control-Allow-Origin:
|
||||
description: |
|
||||
Originating URLs allowed to make cross-origin
|
||||
requests (CORS), separated by spaces. This heading applies to the
|
||||
container only, and all objects within the container with this
|
||||
header applied are CORS-enabled for the allowed origin URLs. A
|
||||
browser (user-agent) typically issues a `preflighted request
|
||||
<https://developer.mozilla.org/en-
|
||||
US/docs/HTTP/Access_control_CORS>`_ , which is an OPTIONS call
|
||||
that verifies the origin is allowed to make the request. The
|
||||
Object Storage service returns 200 if the originating URL is
|
||||
listed in this header parameter, and issues a 401 if the
|
||||
originating URL is not allowed to make a cross-origin request.
|
||||
Once a 200 is returned, the browser makes a second request to the
|
||||
Object Storage service to retrieve the CORS-enabled object.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Meta-Access-Control-Expose-Headers:
|
||||
description: |
|
||||
Headers the Object Storage service exposes to the
|
||||
browser (technically, through the ``user-agent`` setting), in the
|
||||
request response, separated by spaces. By default the Object
|
||||
Storage service returns the following values for this header: -
|
||||
All “simple response headers” as listed on
|
||||
`http://www.w3.org/TR/cors/#simple-response-header
|
||||
<http://www.w3.org/TR/cors/#simple-response-header>`_. - The
|
||||
headers ``etag``, ``x-timestamp``, ``x-trans-id``. - All metadata
|
||||
headers (``X-Container-Meta-*`` for containers and ``X-Object-
|
||||
Meta-*`` for objects) headers listed in ``X-Container- Meta-
|
||||
Access-Control-Expose-Headers``.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Meta-Access-Control-Max-Age:
|
||||
description: |
|
||||
Maximum time for the origin to hold the preflight
|
||||
results. A browser may make an OPTIONS call to verify the origin
|
||||
is allowed to make the request. Set the value to an integer number
|
||||
of seconds after the time that the request was received.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Meta-Quota-Bytes:
|
||||
description: |
|
||||
Sets maximum size of the container, in bytes.
|
||||
Typically these values are set by an administrator. Returns a 413
|
||||
response (request entity too large) when an object PUT operation
|
||||
exceeds this quota value.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Meta-Quota-Count:
|
||||
description: |
|
||||
Sets maximum object count of the container.
|
||||
Typically these values are set by an administrator. Returns a 413
|
||||
response (request entity too large) when an object PUT operation
|
||||
exceeds this quota value.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Meta-Temp-URL-Key:
|
||||
description: |
|
||||
The secret key value for temporary URLs.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Meta-Temp-URL-Key-2:
|
||||
description: |
|
||||
A second secret key value for temporary URLs. The
|
||||
second key enables you to rotate keys by having two active keys at
|
||||
the same time.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Meta-Web-Directory-Type:
|
||||
description: |
|
||||
Sets the content-type of directory marker
|
||||
objects. If the header is not set, default is
|
||||
``application/directory``. Directory marker objects are 0-byte
|
||||
objects that represent directories to create a simulated
|
||||
hierarchical structure. For example, if you set ``"X-Container-
|
||||
Meta-Web-Directory- Type: text/directory"``, Object Storage treats
|
||||
0-byte objects with a content-type of ``text/directory`` as
|
||||
directories rather than objects.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Meta-name:
|
||||
description: |
|
||||
The container metadata, where ``{name}`` is the
|
||||
name of metadata item. You must specify an ``X-Container-Meta-
|
||||
{name}`` header for each metadata item (for each ``{name}``) that
|
||||
you want to add or update.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Meta-name_1:
|
||||
description: |
|
||||
The custom container metadata item, where
|
||||
``{name}`` is the name of the metadata item. One ``X-Container-
|
||||
Meta- {name}`` response header appears for each metadata item (for
|
||||
each ``{name}``).
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
X-Container-Object-Count:
|
||||
description: |
|
||||
The number of objects.
|
||||
in: header
|
||||
required: true
|
||||
type: integer
|
||||
X-Container-Read:
|
||||
description: |
|
||||
Sets a container access control list (ACL) that grants read access.
|
||||
Container ACLs are available on any Object Storage cluster, and are
|
||||
enabled by container rather than by cluster.
|
||||
|
||||
To set the container read ACL:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ curl -X {PUT|POST} -i -H "X-Auth-Token: TOKEN" -H \
|
||||
"X-Container-Read: ACL" STORAGE_URL/CONTAINER
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ curl -X PUT -i \
|
||||
-H "X-Auth-Token: 0101010101" \
|
||||
-H "X-Container-Read: .r:*" \
|
||||
http://swift.example.com/v1/AUTH_bob/read_container
|
||||
|
||||
In the command, specify the ACL in the ``X-Container-Read`` header,
|
||||
as follows:
|
||||
|
||||
- ``.r:*`` All referrers.
|
||||
|
||||
- ``.r:example.com,swift.example.com`` Comma-separated list of
|
||||
referrers.
|
||||
|
||||
- ``.rlistings`` Container listing access.
|
||||
|
||||
- ``AUTH_username`` Access to a user who authenticates through a
|
||||
legacy or non-OpenStack-Identity-based authentication system.
|
||||
|
||||
- ``LDAP_`` Access to all users who authenticate through an LDAP-
|
||||
based legacy or non-OpenStack-Identity-based authentication
|
||||
system.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Read_1:
|
||||
description: |
|
||||
The ACL that grants read access. If not set, this
|
||||
header is not returned by this operation.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Sync-Key:
|
||||
description: |
|
||||
Sets the secret key for container
|
||||
synchronization. If you remove the secret key, synchronization is
|
||||
halted.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Sync-Key_1:
|
||||
description: |
|
||||
The secret key for container synchronization. If
|
||||
not set, this header is not returned by this operation.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Sync-To:
|
||||
description: |
|
||||
Sets the destination for container
|
||||
synchronization. Used with the secret key indicated in the ``X
|
||||
-Container-Sync-Key`` header. If you want to stop a container from
|
||||
synchronizing, send a blank value for the ``X-Container-Sync-Key``
|
||||
header.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Sync-To_1:
|
||||
description: |
|
||||
The destination for container synchronization. If
|
||||
not set, this header is not returned by this operation.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Write:
|
||||
description: |
|
||||
Sets an ACL that grants write access.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Container-Write_1:
|
||||
description: |
|
||||
The ACL that grants write access. If not set,
|
||||
this header is not returned by this operation.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Copied-From:
|
||||
description: |
|
||||
For a copied object, shows the container and
|
||||
object name from which the new object was copied. The value is in
|
||||
the ``{container}/{object}`` format.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Copied-From-Last-Modified:
|
||||
description: |
|
||||
For a copied object, the date and time in `UNIX
|
||||
Epoch time stamp format
|
||||
<https://en.wikipedia.org/wiki/Unix_time>`_ when the container and
|
||||
object name from which the new object was copied was last
|
||||
modified. For example, ``1440619048`` is equivalent to ``Mon,
|
||||
Wed, 26 Aug 2015 19:57:28 GMT``.
|
||||
in: header
|
||||
required: false
|
||||
type: integer
|
||||
X-Copy-From:
|
||||
description: |
|
||||
If set, this is the name of an object used to
|
||||
create the new object by copying the ``X-Copy-From`` object. The
|
||||
value is in form ``{container}/{object}``. You must UTF-8-encode
|
||||
and then URL-encode the names of the container and object before
|
||||
you include them in the header. Using PUT with ``X-Copy-From``
|
||||
has the same effect as using the COPY operation to copy an object.
|
||||
Using ``Range`` header with ``X-Copy-From`` will create a new
|
||||
partial copied object with bytes set by ``Range``.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Delete-After:
|
||||
description: |
|
||||
The number of seconds after which the system
|
||||
removes the object. Internally, the Object Storage system stores
|
||||
this value in the ``X -Delete-At`` metadata item.
|
||||
in: header
|
||||
required: false
|
||||
type: integer
|
||||
X-Delete-At:
|
||||
description: |
|
||||
The date and time in `UNIX Epoch time stamp
|
||||
format <https://en.wikipedia.org/wiki/Unix_time>`_ when the system
|
||||
removes the object. For example, ``1440619048`` is equivalent to
|
||||
``Mon, Wed, 26 Aug 2015 19:57:28 GMT``.
|
||||
in: header
|
||||
required: false
|
||||
type: integer
|
||||
X-Delete-At_1:
|
||||
description: |
|
||||
If set, the date and time in `UNIX Epoch time
|
||||
stamp format <https://en.wikipedia.org/wiki/Unix_time>`_ when the
|
||||
system deletes the object. For example, ``1440619048`` is
|
||||
equivalent to ``Mon, Wed, 26 Aug 2015 19:57:28 GMT``. If not set,
|
||||
this operation does not return this header.
|
||||
in: header
|
||||
required: false
|
||||
type: integer
|
||||
X-Detect-Content-Type:
|
||||
description: |
|
||||
If set to ``true``, Object Storage guesses the
|
||||
content type based on the file extension and ignores the value
|
||||
sent in the ``Content- Type`` header, if present.
|
||||
in: header
|
||||
required: false
|
||||
type: boolean
|
||||
X-Fresh-Metadata:
|
||||
description: |
|
||||
Enables object creation that omits existing user
|
||||
metadata. If set to ``true``, the COPY request creates an object
|
||||
without existing user metadata. Default value is ``false``.
|
||||
in: header
|
||||
required: false
|
||||
type: boolean
|
||||
X-Newest:
|
||||
description: |
|
||||
If set to true , Object Storage queries all
|
||||
replicas to return the most recent one. If you omit this header,
|
||||
Object Storage responds faster after it finds one valid replica.
|
||||
Because setting this header to true is more expensive for the back
|
||||
end, use it only when it is absolutely needed.
|
||||
in: header
|
||||
required: false
|
||||
type: boolean
|
||||
X-Object-Manifest:
|
||||
description: |
|
||||
Set to specify that this is a dynamic large
|
||||
object manifest object. The value is the container and object name
|
||||
prefix of the segment objects in the form ``container/prefix``.
|
||||
You must UTF-8-encode and then URL-encode the names of the
|
||||
container and prefix before you include them in this header.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Object-Manifest_1:
|
||||
description: |
|
||||
If set, to this is a dynamic large object
|
||||
manifest object. The value is the container and object name prefix
|
||||
of the segment objects in the form ``container/prefix``.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Object-Meta-name:
|
||||
description: |
|
||||
The object metadata, where ``{name}`` is the name
|
||||
of the metadata item. You must specify an ``X-Object-Meta-
|
||||
{name}`` header for each metadata ``{name}`` item that you want to
|
||||
add or update.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Object-Meta-name_1:
|
||||
description: |
|
||||
The custom object metadata item, where ``{name}``
|
||||
is the name of the metadata item. One ``X-Object-Meta- {name}``
|
||||
response header appears for each metadata ``{name}`` item.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
X-Remove-Container-name:
|
||||
description: |
|
||||
Removes the metadata item named ``{name}``. For
|
||||
example, ``X -Remove-Container-Read`` removes the ``X-Container-
|
||||
Read`` metadata item.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Remove-Versions-Location:
|
||||
description: |
|
||||
Set to any value to disable versioning.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Static-Large-Object:
|
||||
description: |
|
||||
Set to ``true`` if this object is a static large
|
||||
object manifest object.
|
||||
in: header
|
||||
required: true
|
||||
type: boolean
|
||||
X-Timestamp:
|
||||
description: |
|
||||
The date and time in `UNIX Epoch time stamp
|
||||
format <https://en.wikipedia.org/wiki/Unix_time>`_ when the
|
||||
account, container, or object was initially created as a current
|
||||
version. For example, ``1440619048`` is equivalent to ``Mon, Wed,
|
||||
26 Aug 2015 19:57:28 GMT``.
|
||||
in: header
|
||||
required: true
|
||||
type: integer
|
||||
X-Trans-Id:
|
||||
description: |
|
||||
A unique transaction ID for this request. Your
|
||||
service provider might need this value if you report a problem.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
X-Trans-Id-Extra:
|
||||
description: |
|
||||
Extra transaction information. Use the ``X-Trans-
|
||||
Id-Extra`` request header to include extra information to help you
|
||||
debug any errors that might occur with large object upload and
|
||||
other Object Storage transactions. Object Storage appends the
|
||||
first 32 characters of the ``X-Trans-Id- Extra`` request header
|
||||
value to the transaction ID value in the generated ``X-Trans-Id``
|
||||
response header. You must UTF-8-encode and then URL-encode the
|
||||
extra transaction information before you include it in the ``X
|
||||
-Trans-Id-Extra`` request header. For example, you can include
|
||||
extra transaction information when you upload `large objects
|
||||
<http://docs.openstack.org/user-
|
||||
guide/cli_swift_large_object_creation.html>`_ such as images. When
|
||||
you upload each segment and the manifest, include the same value
|
||||
in the ``X-Trans-Id-Extra`` request header. If an error occurs,
|
||||
you can find all requests that are related to the large object
|
||||
upload in the Object Storage logs. You can also use ``X-Trans-Id-
|
||||
Extra`` strings to help operators debug requests that fail to
|
||||
receive responses. The operator can search for the extra
|
||||
information in the logs.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Versions-Location:
|
||||
description: |
|
||||
Enables versioning on this container. The value
|
||||
is the name of another container. You must UTF-8-encode and then
|
||||
URL-encode the name before you include it in the header. To
|
||||
disable versioning, set the header to an empty string.
|
||||
in: header
|
||||
required: false
|
||||
type: string
|
||||
X-Versions-Location_1:
|
||||
description: |
|
||||
Enables versioning on this container. The value
|
||||
is the name of another container. You must UTF-8-encode and then
|
||||
URL-encode the name before you include it in the header. To
|
||||
disable versioning, set the header to an empty string.
|
||||
in: header
|
||||
required: true
|
||||
type: string
|
||||
|
||||
# variables in path
|
||||
account:
|
||||
description: |
|
||||
The unique name for the account. An account is
|
||||
also known as the project or tenant.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
container:
|
||||
description: |
|
||||
The unique name for the container. The container
|
||||
name must be from 1 to 256 characters long and can start with any
|
||||
character and contain any pattern. Character set must be UTF-8.
|
||||
The container name cannot contain a slash (``/``) character
|
||||
because this character delimits the container and object name. For
|
||||
example, ``/account/container/object``.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
object:
|
||||
description: |
|
||||
The unique name for the object.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
delimiter:
|
||||
description: |
|
||||
Delimiter value, which returns the object names
|
||||
that are nested in the container. If you do not set a prefix and
|
||||
set the delimiter to "/" you may get unexpected results where all
|
||||
the objects are returned instead of only those with the delimiter
|
||||
set.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
end_marker:
|
||||
description: |
|
||||
For a string value, x , returns container names
|
||||
that are less than the marker value.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
filename:
|
||||
description: |
|
||||
Overrides the default file name. Object Storage
|
||||
generates a default file name for GET temporary URLs that is based
|
||||
on the object name. Object Storage returns this value in the
|
||||
``Content-Disposition`` response header. Browsers can interpret
|
||||
this file name value as a file attachment to save. For more
|
||||
information about temporary URLs, see `Temporary URL middleware
|
||||
<http://docs.openstack.org/developer/
|
||||
swift/api/temporary_url_middleware.html>`_.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
format:
|
||||
description: |
|
||||
The response format. Valid values are ``json``,
|
||||
``xml``, or ``plain``. The default is ``plain``. If you append
|
||||
the ``format=xml`` or ``format=json`` query parameter to the
|
||||
storage account URL, the response shows extended container
|
||||
information serialized in that format. If you append the
|
||||
``format=plain`` query parameter, the response lists the container
|
||||
names separated by newlines.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
limit:
|
||||
description: |
|
||||
For an integer value n , limits the number of
|
||||
results to n .
|
||||
in: query
|
||||
required: false
|
||||
type: integer
|
||||
marker:
|
||||
description: |
|
||||
For a string value, x , returns container names
|
||||
that are greater than the marker value.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
multipart-manifest:
|
||||
description: |
|
||||
If ``?multipart-manifest=put``, the object is a
|
||||
static large object manifest and the body contains the manifest.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
multipart-manifest_1:
|
||||
description: |
|
||||
If you include the ``multipart-manifest=delete``
|
||||
query parameter and the object is a static large object, the
|
||||
segment objects and manifest object are deleted. If you omit the
|
||||
``multipart- manifest=delete`` query parameter and the object is a
|
||||
static large object, the manifest object is deleted but the
|
||||
segment objects are not deleted. For a bulk delete, the response
|
||||
body looks the same as it does for a normal bulk delete. In
|
||||
contrast, a plain object DELETE response has an empty body.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
multipart-manifest_2:
|
||||
description: |
|
||||
If you include the ``multipart-manifest=get``
|
||||
query parameter and the object is a large object, the object
|
||||
contents are not returned. Instead, the manifest is returned in
|
||||
the ``X-Object-Manifest`` response header for dynamic large
|
||||
objects or in the response body for static large objects.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
path:
|
||||
description: |
|
||||
For a string value, returns the object names that
|
||||
are nested in the pseudo path.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
prefix:
|
||||
description: |
|
||||
Prefix value. Named items in the response begin
|
||||
with this value.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
swiftinfo_expires:
|
||||
description: |
|
||||
Filters the response by the expiration date and
|
||||
time in `UNIX Epoch time stamp format
|
||||
<https://en.wikipedia.org/wiki/Unix_time>`_. For example,
|
||||
``1440619048`` is equivalent to ``Mon, Wed, 26 Aug 2015 19:57:28
|
||||
GMT``.
|
||||
in: query
|
||||
required: false
|
||||
type: integer
|
||||
swiftinfo_sig:
|
||||
description: |
|
||||
A hash-based message authentication code (HMAC)
|
||||
that enables access to administrator-only information. To use this
|
||||
parameter, the ``swiftinfo_expires`` parameter is also required.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
temp_url_expires:
|
||||
description: |
|
||||
The date and time in `UNIX Epoch time stamp
|
||||
format <https://en.wikipedia.org/wiki/Unix_time>`_ when the
|
||||
signature for temporary URLs expires. For example, ``1440619048``
|
||||
is equivalent to ``Mon, Wed, 26 Aug 2015 19:57:28 GMT``. For more
|
||||
information about temporary URLs, see `Temporary URL middleware
|
||||
<http://docs.openstack.org/developer/swift/api/temporary
|
||||
_url_middleware.html>`_.
|
||||
in: query
|
||||
required: true
|
||||
type: integer
|
||||
temp_url_sig:
|
||||
description: |
|
||||
Used with temporary URLs to sign the request with
|
||||
an HMAC-SHA1 cryptographic signature that defines the allowed HTTP
|
||||
method, expiration date, full path to the object, and the secret
|
||||
key for the temporary URL. For more information about temporary
|
||||
URLs, see `Temporary URL middleware
|
||||
<http://docs.openstack.org/developer/swif
|
||||
t/api/temporary_url_middleware.html>`_.
|
||||
in: query
|
||||
required: true
|
||||
type: string
|
||||
|
||||
# variables in body
|
||||
bytes:
|
||||
description: |
|
||||
The total number of bytes that are stored in
|
||||
Object Storage for the account.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
content_type:
|
||||
description: |
|
||||
The content type of the object.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
count:
|
||||
description: |
|
||||
The number of objects in the container.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
hash:
|
||||
description: |
|
||||
The MD5 checksum value of the object content.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
last_modified:
|
||||
description: |
|
||||
The date and time when the object was last modified.
|
||||
|
||||
The date and time stamp format is `ISO 8601
|
||||
<https://en.wikipedia.org/wiki/ISO_8601>`_:
|
||||
|
||||
::
|
||||
|
||||
CCYY-MM-DDThh:mm:ss±hh:mm
|
||||
|
||||
For example, ``2015-08-27T09:49:58-05:00``.
|
||||
|
||||
The ``±hh:mm`` value, if included, is the time zone as an offset
|
||||
from UTC. In the previous example, the offset value is ``-05:00``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name:
|
||||
description: |
|
||||
The name of the container.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
@ -1,734 +0,0 @@
|
|||
# variables in header
|
||||
{}
|
||||
|
||||
# variables in path
|
||||
alarm_id_1:
|
||||
description: |
|
||||
The UUID of the alarm.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
message_id_1:
|
||||
description: |
|
||||
The UUID of the message.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
meter_name:
|
||||
description: |
|
||||
The name of the meter.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
resource_id_2:
|
||||
description: |
|
||||
The UUID of the resource.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
sample_id:
|
||||
description: |
|
||||
The UUID of the sample.
|
||||
in: path
|
||||
required: false
|
||||
type: string
|
||||
|
||||
# variables in query
|
||||
aggregate:
|
||||
description: |
|
||||
A list of selectable aggregation functions to apply.
|
||||
|
||||
For example:
|
||||
|
||||
::
|
||||
|
||||
GET /v2/meters/METER_NAME/statistics?aggregate.func=cardinality
|
||||
&
|
||||
aggregate.param=resource_id
|
||||
&
|
||||
aggregate.func=cardinality
|
||||
&
|
||||
aggregate.param=project_id
|
||||
in: query
|
||||
required: false
|
||||
type: object
|
||||
data:
|
||||
description: |
|
||||
An alarm within the request body.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
direct:
|
||||
description: |
|
||||
Indicates whether the samples are POST ed
|
||||
directly to storage. Set ``?direct=True`` to POST the samples
|
||||
directly to storage.
|
||||
in: query
|
||||
required: false
|
||||
type: string
|
||||
groupby:
|
||||
description: |
|
||||
Fields for group by aggregation.
|
||||
in: query
|
||||
required: false
|
||||
type: object
|
||||
limit:
|
||||
description: |
|
||||
Limits the maximum number of samples that the response returns.
|
||||
|
||||
For example:
|
||||
|
||||
::
|
||||
|
||||
GET /v2/events?limit=1000
|
||||
in: query
|
||||
required: false
|
||||
type: integer
|
||||
limit_1:
|
||||
description: |
|
||||
Requests a page size of items. Returns a number
|
||||
of items up to a limit value. Use the ``limit`` parameter to make
|
||||
an initial limited request and use the ID of the last-seen item
|
||||
from the response as the ``marker`` parameter value in a
|
||||
subsequent limited request.
|
||||
in: query
|
||||
required: false
|
||||
type: integer
|
||||
meter_links:
|
||||
description: |
|
||||
Set ``?meter_links=1`` to return a self link and
|
||||
related meter links.
|
||||
in: query
|
||||
required: false
|
||||
type: integer
|
||||
period:
|
||||
description: |
|
||||
The period, in seconds, for which you want
|
||||
statistics.
|
||||
in: query
|
||||
required: false
|
||||
type: integer
|
||||
q:
|
||||
description: |
|
||||
Filters the response by one or more arguments.
|
||||
For example: ``?q.field=Foo & q.value=my_text``.
|
||||
in: query
|
||||
required: false
|
||||
type: array
|
||||
q_1:
|
||||
description: |
|
||||
Filters the response by one or more event arguments.
|
||||
|
||||
For example:
|
||||
|
||||
::
|
||||
|
||||
GET /v2/events?q.field=Foo
|
||||
&
|
||||
q.value=my_text
|
||||
in: query
|
||||
required: false
|
||||
type: array
|
||||
samples:
|
||||
description: |
|
||||
A list of samples.
|
||||
in: query
|
||||
required: false
|
||||
type: array
|
||||
state_1:
|
||||
description: |
|
||||
The alarm state. A valid value is ``ok``,
|
||||
``alarm``, or ``insufficient data``.
|
||||
in: query
|
||||
required: true
|
||||
type: string
|
||||
|
||||
# variables in body
|
||||
alarm_actions:
|
||||
description: |
|
||||
The list of actions that the alarm performs.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
alarm_id:
|
||||
description: |
|
||||
The UUID of the alarm.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
alarm_storage:
|
||||
description: |
|
||||
Defines the capabilities for the storage that
|
||||
stores persisting alarm definitions. A value of ``true`` indicates
|
||||
that the capability is available.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
alarms:history:query:complex:
|
||||
description: |
|
||||
If ``true``, the complex query capability for
|
||||
alarm history is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
alarms:history:query:simple:
|
||||
description: |
|
||||
If ``true``, the simple query capability for
|
||||
alarm history is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
alarms:query:complex:
|
||||
description: |
|
||||
If ``true``, the complex query capability for
|
||||
alarm definitions is available for the configured database back
|
||||
end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
alarms:query:simple:
|
||||
description: |
|
||||
If ``true``, the simple query capability for
|
||||
alarm definitions is available for the configured database back
|
||||
end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
api:
|
||||
description: |
|
||||
A set of key and value pairs that contain the API
|
||||
capabilities for the configured storage driver.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
avg:
|
||||
description: |
|
||||
The average of all volume values in the data.
|
||||
in: body
|
||||
required: true
|
||||
type: number
|
||||
combination_rule:
|
||||
description: |
|
||||
The rules for the combination alarm type.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
count:
|
||||
description: |
|
||||
The number of samples seen.
|
||||
in: body
|
||||
required: true
|
||||
type: integer
|
||||
description:
|
||||
description: |
|
||||
Describes the alarm.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
duration:
|
||||
description: |
|
||||
The number of seconds between the oldest and
|
||||
newest date and time stamp.
|
||||
in: body
|
||||
required: true
|
||||
type: number
|
||||
duration_end:
|
||||
description: |
|
||||
The date and time in UTC format of the query end
|
||||
time.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
duration_start:
|
||||
description: |
|
||||
The date and time in UTC format of the query
|
||||
start time.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
enabled:
|
||||
description: |
|
||||
If ``true``, evaluation and actioning is enabled
|
||||
for the alarm.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
event_storage:
|
||||
description: |
|
||||
If ``true``, the capabilities for the storage
|
||||
that stores persisting events is available.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
event_type:
|
||||
description: |
|
||||
The dotted string that represents the event.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
events:query:simple:
|
||||
description: |
|
||||
If ``true``, the simple query capability for
|
||||
events is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
generated:
|
||||
description: |
|
||||
The date and time when the event occurred.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
id:
|
||||
description: |
|
||||
The UUID of the sample.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
insufficient_data_actions:
|
||||
description: |
|
||||
The list of actions that the alarm performs when
|
||||
the alarm state is ``insufficient_data``.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
links:
|
||||
description: |
|
||||
A list that contains a self link and associated
|
||||
meter links.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
max:
|
||||
description: |
|
||||
The maximum volume seen in the data.
|
||||
in: body
|
||||
required: true
|
||||
type: number
|
||||
message_id:
|
||||
description: |
|
||||
The UUID of the message.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
metadata:
|
||||
description: |
|
||||
An arbitrary set of one or more metadata key and
|
||||
value pairs that are associated with the sample.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
metadata_1:
|
||||
description: |
|
||||
A set of one or more arbitrary metadata key and
|
||||
value pairs that are associated with the resource.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
meter:
|
||||
description: |
|
||||
The meter name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
meter_id:
|
||||
description: |
|
||||
The UUID of the meter.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
meters:query:complex:
|
||||
description: |
|
||||
If ``true``, the complex query capability for
|
||||
meters is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
meters:query:metadata:
|
||||
description: |
|
||||
If ``true``, the simple query capability for the
|
||||
metadata of meters is available for the configured database back
|
||||
end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
meters:query:simple:
|
||||
description: |
|
||||
If ``true``, the simple query capability for
|
||||
meters is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
min:
|
||||
description: |
|
||||
The minimum volume seen in the data.
|
||||
in: body
|
||||
required: true
|
||||
type: number
|
||||
name:
|
||||
description: |
|
||||
The name of the alarm.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
name_1:
|
||||
description: |
|
||||
The meter name.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
ok_actions:
|
||||
description: |
|
||||
The list of actions that the alarm performs when
|
||||
the alarm state is ``ok``.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
period_end:
|
||||
description: |
|
||||
The period end date and time in UTC format.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
period_start:
|
||||
description: |
|
||||
The period start date and time in UTC format.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
project_id:
|
||||
description: |
|
||||
The UUID of the project or tenant that owns the
|
||||
resource.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
project_id_1:
|
||||
description: |
|
||||
The UUID of the project.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
project_id_2:
|
||||
description: |
|
||||
The UUID of the owning project or tenant.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
raw:
|
||||
description: |
|
||||
A dictionary object that stores event messages
|
||||
for future evaluation.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
recorded_at:
|
||||
description: |
|
||||
The date and time when the sample was recorded.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
repeat_actions:
|
||||
description: |
|
||||
If set to ``true``, the alarm notifications are
|
||||
repeated. Otherwise, this value is ``false``.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
resource_id:
|
||||
description: |
|
||||
The UUID of the resource for which the
|
||||
measurements are taken.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
resource_id_1:
|
||||
description: |
|
||||
The UUID of the resource.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
resources:query:complex:
|
||||
description: |
|
||||
If ``true``, the complex query capability for
|
||||
resources is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
resources:query:metadata:
|
||||
description: |
|
||||
If ``true``, the simple query capability for the
|
||||
metadata of resources is available for the configured database
|
||||
back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
resources:query:simple:
|
||||
description: |
|
||||
If ``true``, the simple query capability for
|
||||
resources is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
samples:query:complex:
|
||||
description: |
|
||||
If ``true``, the complex query capability for
|
||||
samples is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
samples:query:metadata:
|
||||
description: |
|
||||
If ``true``, the simple query capability for the
|
||||
metadata of samples is available for the configured database back
|
||||
end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
samples:query:simple:
|
||||
description: |
|
||||
If ``true``, the simple query capability for
|
||||
samples is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
source:
|
||||
description: |
|
||||
The name of the source that identifies where the
|
||||
sample comes from.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
source_1:
|
||||
description: |
|
||||
The name of the source from which the meter came.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
source_2:
|
||||
description: |
|
||||
The name of the source from which the resource
|
||||
came.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
state:
|
||||
description: |
|
||||
The state of the alarm.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
state_timestamp:
|
||||
description: |
|
||||
The date and time of the alarm state.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
statistics:aggregation:selectable:avg:
|
||||
description: |
|
||||
If ``true``, the ``avg`` capability is available
|
||||
for the configured database back end. Use the ``avg`` capability
|
||||
to get average values for samples.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:aggregation:selectable:cardinality:
|
||||
description: |
|
||||
If ``true``, the ``cardinality`` capability is
|
||||
available for the configured database back end. Use the
|
||||
``cardinality`` capability to get cardinality for samples.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:aggregation:selectable:count:
|
||||
description: |
|
||||
If ``true``, the ``count`` capability is
|
||||
available for the configured database back end. Use the ``count``
|
||||
capability to calculate the number of samples for a query.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:aggregation:selectable:max:
|
||||
description: |
|
||||
If ``true``, the ``max`` capability is available
|
||||
for the configured database back end. . Use the ``max`` capability
|
||||
to calculate the maximum value for a query.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:aggregation:selectable:min:
|
||||
description: |
|
||||
If ``true``, the ``min`` capability is available
|
||||
for the configured database back end. Use the ``min`` capability
|
||||
to calculate the minimum value for a query.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:aggregation:selectable:quartile:
|
||||
description: |
|
||||
If ``true``, the ``quartile`` capability is
|
||||
available for the configured database back end. Use the
|
||||
``quartile`` capability to calculate the quartile of sample
|
||||
volumes for a query.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:aggregation:selectable:stddev:
|
||||
description: |
|
||||
If ``true``, the ``stddev`` capability is
|
||||
available for the configured database back end. Use the ``stddev``
|
||||
capability to calculate the standard deviation of sample volumes
|
||||
for a query.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:aggregation:selectable:sum:
|
||||
description: |
|
||||
If ``true``, the ``sum`` capability is available
|
||||
for the configured database back end. Use the ``sum`` capability
|
||||
to calculate the sum of sample volumes for a query.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:aggregation:standard:
|
||||
description: |
|
||||
If ``true``, the ``standard`` set of aggregation
|
||||
capability is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:groupby:
|
||||
description: |
|
||||
If ``true``, the ``groupby`` capability is
|
||||
available for calculating statistics for the configured database
|
||||
back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:query:complex:
|
||||
description: |
|
||||
If ``true``, the complex query capability for
|
||||
statistics is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:query:metadata:
|
||||
description: |
|
||||
If ``true``, the simple query capability for the
|
||||
sample metadata that is used to calculate statistics is available
|
||||
for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
statistics:query:simple:
|
||||
description: |
|
||||
If ``true``, the simple query capability for
|
||||
statistics is available for the configured database back end.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
storage:
|
||||
description: |
|
||||
If ``true``, the capabilities for the storage
|
||||
that stores persisting samples is available.
|
||||
in: body
|
||||
required: true
|
||||
type: object
|
||||
storage:production_ready:
|
||||
description: |
|
||||
If ``true``, the database back end is ready to
|
||||
use in a production environment.
|
||||
in: body
|
||||
required: true
|
||||
type: boolean
|
||||
sum:
|
||||
description: |
|
||||
The total of all of the volume values seen in the
|
||||
data.
|
||||
in: body
|
||||
required: true
|
||||
type: number
|
||||
threshold_rule:
|
||||
description: |
|
||||
The rules for the threshold alarm type.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
timestamp:
|
||||
description: |
|
||||
The date and time in UTC format when the
|
||||
measurement was made.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
timestamp_1:
|
||||
description: |
|
||||
The date and time of the alarm.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
traits:
|
||||
description: |
|
||||
A list of objects. Each object contains key and
|
||||
value pairs that describe the event.
|
||||
in: body
|
||||
required: true
|
||||
type: array
|
||||
type:
|
||||
description: |
|
||||
The meter type.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
type_1:
|
||||
description: |
|
||||
The type of the alarm, which is either
|
||||
``threshold`` or ``combination``.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
type_2:
|
||||
description: |
|
||||
The meter type. The type value is gauge, delta,
|
||||
or cumulative.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
unit:
|
||||
description: |
|
||||
The unit of measure for the ``volume`` value.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
unit_1:
|
||||
description: |
|
||||
The unit of measure.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
unit_2:
|
||||
description: |
|
||||
The unit type of the data set.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
user_id:
|
||||
description: |
|
||||
The UUID of the user who either created or last
|
||||
updated the resource.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
user_id_1:
|
||||
description: |
|
||||
The UUID of the user.
|
||||
in: body
|
||||
required: true
|
||||
type: string
|
||||
volume:
|
||||
description: |
|
||||
The actual measured value.
|
||||
in: body
|
||||
required: true
|
||||
type: number
|
||||
|
38
bower.json
38
bower.json
|
@ -1,38 +0,0 @@
|
|||
{
|
||||
"name": "fairy-slipper",
|
||||
"version": "0.0.0",
|
||||
"homepage": "https://github.com/russell/fairy-slipper",
|
||||
"authors": [
|
||||
"Russell Sim <russell.sim@gmail.com>"
|
||||
],
|
||||
"description": "OpenStack API browser",
|
||||
"license": "Apache2",
|
||||
"ignore": [
|
||||
"**/.*",
|
||||
"node_modules",
|
||||
"bower_components",
|
||||
"test",
|
||||
"tests"
|
||||
],
|
||||
"dependencies": {
|
||||
"angular": "~1.4.6",
|
||||
"angular-route": "~1.4.6",
|
||||
"angular-loader": "~1.4.6",
|
||||
"angular-mocks": "~1.4.6",
|
||||
"angular-bootstrap": "~0.13.0",
|
||||
"bootstrap": "~3.3.5",
|
||||
"angular-resource": "~1.4.6",
|
||||
"angular-snap": "~1.8.3",
|
||||
"angular-marked": "~0.0.21",
|
||||
"angular-animate": "~1.4.6",
|
||||
"angular-highlightjs": "~0.4.1",
|
||||
"highlightjs": "~8.7.0",
|
||||
"dotjem-angular-tree": "~0.2.1"
|
||||
},
|
||||
"resolutions": {
|
||||
"angular": "1.4.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"angular-mocks": "~1.4.6"
|
||||
}
|
||||
}
|
72
config.py
72
config.py
|
@ -1,72 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Server Specific Configurations
|
||||
server = {
|
||||
'port': '8080',
|
||||
'host': '0.0.0.0'
|
||||
}
|
||||
|
||||
# Pecan Application Configurations
|
||||
app = {
|
||||
'root': 'fairy_slipper.controllers.root.RootController',
|
||||
'modules': ['fairy_slipper'],
|
||||
'static_root': '%(confdir)s/public',
|
||||
'template_path': '%(confdir)s/fairy_slipper/templates',
|
||||
'api_doc': '%(confdir)s/api_doc',
|
||||
'debug': True,
|
||||
'errors': {
|
||||
404: '/error/404',
|
||||
'__force_dict__': True
|
||||
}
|
||||
}
|
||||
|
||||
logging = {
|
||||
'root': {'level': 'INFO', 'handlers': ['console']},
|
||||
'loggers': {
|
||||
'fairy_slipper': {'level': 'DEBUG', 'handlers': ['console']},
|
||||
'pecan.commands.serve': {'level': 'DEBUG', 'handlers': ['console']},
|
||||
'py.warnings': {'handlers': ['console']},
|
||||
'__force_dict__': True
|
||||
},
|
||||
'handlers': {
|
||||
'console': {
|
||||
'level': 'DEBUG',
|
||||
'class': 'logging.StreamHandler',
|
||||
'formatter': 'color'
|
||||
}
|
||||
},
|
||||
'formatters': {
|
||||
'simple': {
|
||||
'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]'
|
||||
'[%(threadName)s] %(message)s')
|
||||
},
|
||||
'color': {
|
||||
'()': 'pecan.log.ColorFormatter',
|
||||
'format': ('%(asctime)s [%(padded_color_levelname)s] [%(name)s]'
|
||||
'[%(threadName)s] %(message)s'),
|
||||
'__force_dict__': True
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Custom Configurations must be in Python dictionary format::
|
||||
#
|
||||
# foo = {'bar':'baz'}
|
||||
#
|
||||
# All configurations are accessible at::
|
||||
# pecan.conf
|
|
@ -1,75 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.abspath('../..'))
|
||||
# -- General configuration ----------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
#'sphinx.ext.intersphinx',
|
||||
'oslosphinx'
|
||||
]
|
||||
|
||||
# autodoc generation is a bit aggressive and a nuisance when doing heavy
|
||||
# text edit cycles.
|
||||
# execute "export SPHINX_DEBUG=1" in your terminal to disable
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'fairy-slipper'
|
||||
copyright = u'2013, OpenStack Foundation'
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
add_module_names = True
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# -- Options for HTML output --------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
||||
# Sphinx are currently 'default' and 'sphinxdoc'.
|
||||
# html_theme_path = ["."]
|
||||
# html_theme = '_theme'
|
||||
# html_static_path = ['static']
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = '%sdoc' % project
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass
|
||||
# [howto/manual]).
|
||||
latex_documents = [
|
||||
('index',
|
||||
'%s.tex' % project,
|
||||
u'%s Documentation' % project,
|
||||
u'OpenStack Foundation', 'manual'),
|
||||
]
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
#intersphinx_mapping = {'http://docs.python.org/': None}
|
|
@ -1,4 +0,0 @@
|
|||
============
|
||||
Contributing
|
||||
============
|
||||
.. include:: ../../CONTRIBUTING.rst
|
|
@ -1,26 +0,0 @@
|
|||
.. fairy-slipper documentation master file, created by
|
||||
sphinx-quickstart on Tue Jul 9 22:26:36 2013.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to fairy-slipper's documentation!
|
||||
=========================================
|
||||
|
||||
Contents:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
readme
|
||||
installation
|
||||
usage
|
||||
contributing
|
||||
tempest
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
============
|
||||
Installation
|
||||
============
|
||||
|
||||
At the command line::
|
||||
|
||||
$ pip install fairy-slipper
|
||||
|
||||
Or, if you have virtualenvwrapper installed::
|
||||
|
||||
$ mkvirtualenv fairy-slipper
|
||||
$ pip install fairy-slipper
|
|
@ -1 +0,0 @@
|
|||
.. include:: ../../README.rst
|
|
@ -1,55 +0,0 @@
|
|||
Tempest Examples
|
||||
================
|
||||
|
||||
To back fill the missing examples from the WADL, tempest result logs
|
||||
can be used.
|
||||
|
||||
If you run tempest with a `logging.conf` like. The one below then the
|
||||
resulting log `requests.log` will be suitable for processing.
|
||||
|
||||
::
|
||||
|
||||
[loggers]
|
||||
keys=root,tempest_http
|
||||
|
||||
[handlers]
|
||||
keys=file,devel
|
||||
|
||||
[formatters]
|
||||
keys=simple,dumb
|
||||
|
||||
[logger_root]
|
||||
level=DEBUG
|
||||
handlers=devel
|
||||
|
||||
[logger_tempest_http]
|
||||
level=DEBUG
|
||||
handlers=file
|
||||
qualname=tempest_lib.common.rest_client
|
||||
|
||||
[handler_file]
|
||||
class=FileHandler
|
||||
level=DEBUG
|
||||
args=('requests.log', 'w+')
|
||||
formatter=dumb
|
||||
|
||||
[handler_devel]
|
||||
class=StreamHandler
|
||||
level=INFO
|
||||
args=(sys.stdout,)
|
||||
formatter=simple
|
||||
|
||||
[formatter_simple]
|
||||
format=%(asctime)s.%(msecs)03d %(process)d %(levelname)s: %(message)s
|
||||
|
||||
[formatter_dumb]
|
||||
format=%(message)s
|
||||
|
||||
|
||||
Once the tempest run has completed, you can then process the log into
|
||||
a format for the WADL conversion process. This is done using the `fairy-slipper-tempest-log` tool::
|
||||
|
||||
fairy-slipper-tempest-log -o conversion_files/ requests.log
|
||||
|
||||
Where `conversion_files/` is the directory you are using to store the
|
||||
intermediate files during the migration from docbook.
|
|
@ -1,207 +0,0 @@
|
|||
======
|
||||
How to
|
||||
======
|
||||
|
||||
This page offers a collection of use cases for how to use this tool to maintain
|
||||
API guides using this framework, and describes best practices to give the API
|
||||
consumers information they need to get their tasks done.
|
||||
|
||||
How to migrate existing WADL
|
||||
----------------------------
|
||||
|
||||
The fairy-slipper repository contains a migration script that retrieves the
|
||||
WADL source from the openstack/api-site/ repository and then converts the WADL
|
||||
to RST plus JSON files.
|
||||
|
||||
The use for that script is to run it from the root of the fairy-slipper
|
||||
directory, using -V to create a virtualenv if one doesn't already exist::
|
||||
|
||||
./migrate.sh
|
||||
|
||||
The script has some options for the level of migration:
|
||||
|
||||
--docs-only Only install fairy-slipper and the api-site clone
|
||||
--verbose-docs Verbose logging of document generation
|
||||
--docbkx2json Only perform docbookx to json conversion
|
||||
--wadl2swagger Only perform wadl to swagger conversion
|
||||
--swagger2rst Only perform swagger to rst conversion
|
||||
|
||||
The full migration outputs all files to a `api_doc` directory.
|
||||
|
||||
The interim migrations output files to a `conversions_files` directory. For
|
||||
example, if you run with --wadl2swagger you can see Swagger JSON reference
|
||||
files in `conversion_files`.
|
||||
|
||||
The `api_doc` output is organized by service in directories, such as
|
||||
`blockstorage` and `compute`.
|
||||
|
||||
You may see warnings logged, such as::
|
||||
|
||||
2015-11-30 14:57:13,946 fairy_slipper.cmd.wadl_to_swagger WARNING Can't find method listFlavors
|
||||
2015-11-30 14:57:14,377 fairy_slipper.cmd.wadl_to_swagger WARNING No tags for method getServerAddresses
|
||||
|
||||
That means that the WADL itself has an unclear definition. So far we have been
|
||||
able to address these warnings by patching the WADL files. Here are example
|
||||
patches that fixed what was uncovered in the past:
|
||||
|
||||
* https://review.openstack.org/#/c/213571/ (couldn't find a List flavors method)
|
||||
* https://review.openstack.org/#/c/215350/ (couldn't find a /ips method)
|
||||
|
||||
To find out which WADL files have issues, run the `migrate.sh` script with
|
||||
`--verbose-docs`::
|
||||
|
||||
2015-12-01 08:23:14,549 fairy_slipper.cmd.wadl_to_swagger INFO Parsing /Users/annegentle/src/fairy-slipper/api-site/api-ref/src/wadls/netconn-api/src/os-networks.wadl
|
||||
2015-12-01 08:23:14,551 fairy_slipper.cmd.wadl_to_swagger WARNING No tags for method listVersionsv2-neutron
|
||||
2015-12-01 08:23:14,552 fairy_slipper.cmd.wadl_to_swagger WARNING No tags for method showVersionDetailsv2-neutron
|
||||
|
||||
How to build and display API guides
|
||||
-----------------------------------
|
||||
|
||||
Once you have the migrated files, including RST and JSON files, you can run the
|
||||
web server to display the API reference information.
|
||||
|
||||
Prerequisites:
|
||||
|
||||
* npm
|
||||
* bower
|
||||
|
||||
This installs bower into the global system path, which is not ideal. However,
|
||||
if you run into issues while running the server, you can run these commands::
|
||||
|
||||
sudo npm install -g bower
|
||||
bower install installed
|
||||
|
||||
To run the web server after getting migrated content with the `migrate.sh`
|
||||
script, you run the `run_server.sh` script::
|
||||
|
||||
./run_server.sh
|
||||
|
||||
A Pecan-based web server then runs on http://127.0.0.1:8080 in your local
|
||||
environment.
|
||||
|
||||
To see the JSON listing of all documented APIs, go to:
|
||||
http://127.0.0.1:8080/doc/
|
||||
|
||||
Then, you can look at individual REST API URL such as:
|
||||
|
||||
"url": "identity/v2/",
|
||||
|
||||
by adding identity/v2/ to the local server URL http://127.0.0.1:8080/doc/.
|
||||
|
||||
The actual Swagger JSON file is here for inspection once you're running the web
|
||||
server:
|
||||
|
||||
http://127.0.0.1:8080/doc/identity/v2/
|
||||
|
||||
If you want to work on the server code (AngularJS) itself, install `grunt` and
|
||||
you can then view the web server at http://127.0.0.1:9000 (port 9000 instead of
|
||||
8080) with automatic reloading.
|
||||
|
||||
How to author API reference information
|
||||
---------------------------------------
|
||||
|
||||
Once all the WADL information is migrated to the new format, we need to author
|
||||
API information in a certain format so we can integrate it on the
|
||||
http://developer.openstack.org pages.
|
||||
|
||||
The interim `conversion_files` files are:
|
||||
|
||||
* JSON files containing tags that Sphinx uses to put content together
|
||||
* JSON files containing Swagger snippets with all the possible paths from WADL
|
||||
|
||||
The basic `api_doc` files are contained in directories per service and version:
|
||||
|
||||
* <servicename>/<version>/v1.rst: contains a listing of each
|
||||
GET/PUT/POST/DELETE call as a Sphinx directive such as `.. http:put::` with
|
||||
a title, synopsis, request and response examples pointing to files in the
|
||||
`/examples/` directory, accepts and produces info, tag info, request schema,
|
||||
and parameters along with documented status codes.
|
||||
* <servicename>/<version>/v1-tags.rst: contains the groupings such as API
|
||||
versions, that were available in the WADL file. Contains considerable more
|
||||
docs and descriptions if that content was originally written in the WADL,
|
||||
such as defining backup statuses, for example.
|
||||
* <servicename>/<version>/: contains a Swagger-format listing of all calls
|
||||
users can get from that service. These JSON files are able to be assembled
|
||||
into a longer Swagger file. *
|
||||
|
||||
.. note::
|
||||
Theoretically, anyway. Have yet to do this re-assembly in practice. Or, the
|
||||
Swagger can be hand-written using the Swagger format for an API that
|
||||
did not originally have a WADL file migrated. In that case, you would still
|
||||
use the file and directory structure described above to have the Angular JS
|
||||
app display the content.
|
||||
|
||||
How to author API concepts and how-to articles
|
||||
----------------------------------------------
|
||||
|
||||
The landing page at developer.openstack.org is sourced from
|
||||
https://github.com/openstack/api-site/tree/master/www/index.html. It
|
||||
is undergoing a revision, but the page currently contains links to SDKs, CLIs,
|
||||
and API reference information for OpenStack services.
|
||||
|
||||
For API articles such as "What are the request and response formats for an API"
|
||||
or "Server concepts" we can point to the Compute API Guide for example,
|
||||
published to http://developer.openstack.org/api-guide/compute/ and sourced from
|
||||
https://github.com/openstack/nova/tree/master/api-guide/source. These articles
|
||||
are written in RST and built with Sphinx. Each OpenStack project should follow
|
||||
that pattern for conceptual and how-to information published in articles to
|
||||
developer.openstack.org.
|
||||
|
||||
How to generate API reference outlines from code
|
||||
------------------------------------------------
|
||||
|
||||
You can configure the API endpoint on a service that uses `routes` as its
|
||||
routing system to generate API reference information using this tool.
|
||||
First look at the config to determine the router class path. It can be easily
|
||||
identified in most services `paste.ini` files by tracing back from the route.
|
||||
|
||||
As an example, using this Murano config you can see that the URL `/v1` maps to
|
||||
the app `apiv1app`, and since this service uses `routes` you can just copy all
|
||||
class path from the paste.app_factory line except for the '.factory' part.
|
||||
Copying these paths makes sense because we don't want to make a router, we
|
||||
simply want it so we can inspect it.
|
||||
|
||||
::
|
||||
|
||||
[composite:rootapp]
|
||||
use = egg:Paste#urlmap
|
||||
/: apiversions
|
||||
/v1: apiv1app
|
||||
|
||||
[app:apiversions]
|
||||
paste.app_factory = murano.api.versions:create_resource
|
||||
|
||||
[app:apiv1app]
|
||||
paste.app_factory = murano.api.v1.router:API.factory
|
||||
|
||||
|
||||
So to use fairy-slipper in this project we would add the following to
|
||||
`api-paste.ini`::
|
||||
|
||||
[composite:rootapp]
|
||||
use = egg:Paste#urlmap
|
||||
/: apiversions
|
||||
/v1: apiv1app
|
||||
/docs: fairyslipperapp
|
||||
|
||||
[app:apiversions]
|
||||
paste.app_factory = murano.api.versions:create_resource
|
||||
|
||||
[app:apiv1app]
|
||||
paste.app_factory = murano.api.v1.router:API.factory
|
||||
|
||||
[app:fairyslipperapp]
|
||||
paste.app_factory = fairy_slipper.app_routes:app_factory
|
||||
v1 = murano.api.v1.router:API
|
||||
|
||||
And that's it, well in an ideal world it would be. However, because of the
|
||||
flexibility of paste deploy this configuration as-is won't work for a running
|
||||
instance of Murano. You would have to also disable all the middleware other
|
||||
than `request_id faultwrap rootapp`, though, this almost certainly will result
|
||||
in a broken Murano service. So, for the purposes of creating an outline, or for
|
||||
ensuring completeness of the API docs starting point, you could change the
|
||||
`api-paste.ini` for the purposes of inspection only.
|
||||
|
||||
Note that the file `controllers/routes_inspector.py` in fairy-slipper is
|
||||
written to match the Murano example. If your service has a different factory
|
||||
method, you could change fairy-slipper to match.
|
|
@ -1,5 +0,0 @@
|
|||
# -*- mode: python -*-
|
||||
from os import path
|
||||
from pecan.deploy import deploy
|
||||
pathname = path.join(path.abspath(path.dirname(__file__)), 'config.py')
|
||||
application = deploy(pathname)
|
|
@ -1,23 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import pbr.version
|
||||
|
||||
__version__ = pbr.version.VersionInfo(
|
||||
'fairy_slipper').version_string()
|
|
@ -1,29 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from pecan import make_app
|
||||
|
||||
|
||||
def setup_app(config):
|
||||
|
||||
app_conf = dict(config.app)
|
||||
|
||||
return make_app(
|
||||
app_conf.pop('root'),
|
||||
logging=getattr(config, 'logging', {}),
|
||||
**app_conf
|
||||
)
|
|
@ -1,30 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from pecan import make_app
|
||||
|
||||
from fairy_slipper.controllers import routes_inspector
|
||||
|
||||
|
||||
def setup_app(local_conf):
|
||||
return make_app(
|
||||
routes_inspector.VersionAPIController(local_conf),
|
||||
)
|
||||
|
||||
|
||||
def app_factory(global_config, **local_conf):
|
||||
return setup_app(local_conf=local_conf)
|
|
@ -1,327 +0,0 @@
|
|||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import codecs
|
||||
from jinja2 import Environment
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from os import path
|
||||
import textwrap
|
||||
import yaml
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
environment = Environment()
|
||||
|
||||
|
||||
TMPL_API = """
|
||||
{{method['title']}}
|
||||
{% for i in range(method['title']|count) -%}
|
||||
=
|
||||
{%- endfor %}
|
||||
|
||||
.. rest_method:: {{method['method']|upper}} {{path}}
|
||||
|
||||
{{method['summary']}}
|
||||
|
||||
{%- if method.description != '' %}
|
||||
{% for line in method.description.split('\n') %}
|
||||
{{line}}
|
||||
{%- endfor %}
|
||||
{%- endif %}
|
||||
|
||||
{% for status_code, response in method['responses'].items() %}
|
||||
{%- if status_code == '200' %}
|
||||
Normal response codes: {{status_code}}
|
||||
{% endif -%}
|
||||
{%- endfor -%}
|
||||
|
||||
|
||||
Error response codes:
|
||||
{%- for status_code in error_codes -%}
|
||||
{{status_code}},
|
||||
{%- endfor %}
|
||||
|
||||
|
||||
Request Parameters
|
||||
------------------
|
||||
|
||||
.. rest_parameters:: {{method['operationId']}}.yaml
|
||||
{% for p in request_params %}
|
||||
- {{p}}: {{p}}
|
||||
{%- endfor %}
|
||||
|
||||
{%- if 'application/json' in method['examples'] %}
|
||||
|
||||
Request Example
|
||||
---------------
|
||||
|
||||
.. literalinclude:: {{method['request_sample']}}
|
||||
:language: javascript
|
||||
{% endif %}
|
||||
|
||||
|
||||
{% if response_params|count > 0 %}
|
||||
Response Parameters
|
||||
-------------------
|
||||
|
||||
.. rest_parameters:: {{method['operationId']}}.yaml
|
||||
{% for p in response_params %}
|
||||
- {{p}}: {{p}}
|
||||
{%- endfor %}
|
||||
{% endif %}
|
||||
|
||||
|
||||
{% for status_code, response in method['responses'].items() %}
|
||||
{%- if status_code == '200' %}
|
||||
Response Example
|
||||
----------------
|
||||
|
||||
.. literalinclude:: {{response['response_sample']}}
|
||||
:language: javascript
|
||||
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
"""
|
||||
|
||||
|
||||
def create_parameter(name, form, request, opt, ptype):
|
||||
p = {}
|
||||
p[name] = {}
|
||||
if form is not '':
|
||||
p[name]['format'] = form
|
||||
p[name]['in'] = request
|
||||
p[name]['required'] = opt
|
||||
p[name]['type'] = ptype
|
||||
return p
|
||||
|
||||
|
||||
def format_param(desc):
|
||||
desc.rstrip('\n')
|
||||
param = ' description: |\n'
|
||||
param_wrap = textwrap.TextWrapper(
|
||||
initial_indent=param,
|
||||
subsequent_indent=' ')
|
||||
if ('::\n\n' in desc) or \
|
||||
('.. code-block::' in desc):
|
||||
for i, line in enumerate(desc.split('\n')):
|
||||
if len(line) is 0:
|
||||
param += '\n'
|
||||
else:
|
||||
param += ' ' + line + '\n'
|
||||
return param.rstrip('\n')
|
||||
else:
|
||||
new_text = param_wrap.wrap(' ' + desc)
|
||||
return '\n'.join(new_text)
|
||||
|
||||
|
||||
def main1(filename, output_dir):
|
||||
log.info('Parsing %s' % filename)
|
||||
swagger = json.load(open(filename))
|
||||
|
||||
info = swagger['info']
|
||||
version = info['version']
|
||||
service = info['service']
|
||||
service_path = path.join(output_dir, service)
|
||||
full_path = path.join(service_path, version)
|
||||
if not path.exists(service_path):
|
||||
os.makedirs(service_path)
|
||||
if not path.exists(full_path):
|
||||
os.makedirs(full_path)
|
||||
|
||||
# Create .inc files for each tag
|
||||
for tags in swagger['tags']:
|
||||
filename_rst = ""
|
||||
if 'name' in tags and tags['name'] is not "":
|
||||
filename_rst = '%s.inc' % tags['name']
|
||||
filepath_rst = path.join(full_path, filename_rst)
|
||||
|
||||
log.info("Writing RST inc file %s", filepath_rst)
|
||||
with codecs.open(filepath_rst,
|
||||
'w', "utf-8") as out_file:
|
||||
out_file.write(".. -*- rst -*-\n\n")
|
||||
out_file.write('=' * len(tags['description']))
|
||||
out_file.write("\n")
|
||||
out_file.write(tags['description'])
|
||||
out_file.write("\n")
|
||||
out_file.write('=' * len(tags['description']))
|
||||
out_file.write("\n\n")
|
||||
out_file.write(tags['summary'])
|
||||
out_file.write("\n\n")
|
||||
|
||||
for key_path, paths in swagger['paths'].items():
|
||||
for p in paths:
|
||||
opId = p['operationId']
|
||||
method_req_params = p['parameters']
|
||||
tag = p['tags'][0]
|
||||
request_params = []
|
||||
response_params = []
|
||||
|
||||
filename = '%s.yaml' % opId
|
||||
filepath = path.join(full_path, filename)
|
||||
log.info("Writing %s", filepath)
|
||||
stream = file(filepath, 'a')
|
||||
|
||||
# get the response objects
|
||||
method_responses = p['responses']
|
||||
for r, rval in method_responses.items():
|
||||
if 'headers' in rval:
|
||||
for h, val in rval['headers'].items():
|
||||
new_param = create_parameter(h,
|
||||
val['format'],
|
||||
str('header'),
|
||||
val['required'],
|
||||
val['type'])
|
||||
|
||||
response_params.append(h)
|
||||
yaml.safe_dump(new_param,
|
||||
stream,
|
||||
allow_unicode=True,
|
||||
default_flow_style=False)
|
||||
new_desc = format_param(val['description'])
|
||||
stream.write(new_desc.encode('utf8'))
|
||||
stream.write('\n\n')
|
||||
|
||||
# get the response parameters
|
||||
if 'schema' in rval:
|
||||
response_schema_name = opId + '_' + r
|
||||
for op, opval in swagger['definitions'].items():
|
||||
if op == response_schema_name:
|
||||
props = opval['properties']
|
||||
for k, val in props.items():
|
||||
new_param = create_parameter(k,
|
||||
val['format'],
|
||||
str("body"),
|
||||
val['required'],
|
||||
val['type'])
|
||||
|
||||
# add to response param list
|
||||
response_params.append(k)
|
||||
|
||||
yaml.safe_dump(new_param,
|
||||
stream,
|
||||
allow_unicode=True,
|
||||
default_flow_style=False)
|
||||
new_desc = format_param(val['description'])
|
||||
stream.write(new_desc.encode('utf8'))
|
||||
stream.write('\n\n')
|
||||
|
||||
# get the request parameters
|
||||
for params in method_req_params:
|
||||
if params['in'] == 'body':
|
||||
for op, opval in swagger['definitions'].items():
|
||||
if op == opId:
|
||||
props = opval['properties']
|
||||
for k, val in props.items():
|
||||
new_param = create_parameter(k,
|
||||
val['format'],
|
||||
str("body"),
|
||||
val['required'],
|
||||
val['type'])
|
||||
|
||||
# add to request param list
|
||||
request_params.append(k)
|
||||
|
||||
yaml.safe_dump(new_param,
|
||||
stream,
|
||||
allow_unicode=True,
|
||||
default_flow_style=False)
|
||||
new_desc = format_param(val['description'])
|
||||
stream.write(new_desc.encode('utf8'))
|
||||
stream.write('\n\n')
|
||||
|
||||
else:
|
||||
new_param = create_parameter(params['name'],
|
||||
params['format'],
|
||||
params['in'],
|
||||
params['required'],
|
||||
params['type'])
|
||||
# add to request param list
|
||||
request_params.append(params['name'])
|
||||
|
||||
yaml.safe_dump(new_param,
|
||||
stream,
|
||||
allow_unicode=True,
|
||||
default_flow_style=False)
|
||||
new_desc = format_param(params['description'])
|
||||
stream.write(new_desc.encode('utf8'))
|
||||
stream.write('\n\n')
|
||||
|
||||
# error responses
|
||||
error_codes = []
|
||||
for status_code in p['responses']:
|
||||
if status_code > '200':
|
||||
error_codes.append(status_code)
|
||||
|
||||
TMPL = environment.from_string(TMPL_API)
|
||||
result = TMPL.render(method=p,
|
||||
path=key_path,
|
||||
request_params=request_params,
|
||||
response_params=response_params,
|
||||
error_codes=error_codes)
|
||||
|
||||
# Append method info to inc file
|
||||
filename_rst = '%s.inc' % tag
|
||||
filepath_rst = path.join(full_path, filename_rst)
|
||||
|
||||
log.info("Writing RST inc file %s", filepath_rst)
|
||||
with codecs.open(filepath_rst,
|
||||
"a", "utf-8") as out_file:
|
||||
out_file.write(result)
|
||||
|
||||
# write out a rst file
|
||||
filename_rst = '%s.rst' % opId
|
||||
filepath_rst = path.join(full_path, filename_rst)
|
||||
log.info("Writing RST/operation %s", filepath_rst)
|
||||
with codecs.open(filepath_rst,
|
||||
'w', "utf-8") as out_file:
|
||||
out_file.write(result)
|
||||
|
||||
|
||||
def main():
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
||||
parser.add_argument(
|
||||
'-v', '--verbose', action='count', default=0,
|
||||
help="Increase verbosity (specify multiple times for more)")
|
||||
parser.add_argument(
|
||||
'-o', '--output-dir', action='store',
|
||||
help="The directory where the yaml files will be output.")
|
||||
parser.add_argument(
|
||||
'filename',
|
||||
help="File to convert")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
log_level = logging.WARNING
|
||||
if args.verbose == 1:
|
||||
log_level = logging.INFO
|
||||
elif args.verbose >= 2:
|
||||
log_level = logging.DEBUG
|
||||
|
||||
logging.basicConfig(
|
||||
level=log_level,
|
||||
format='%(asctime)s %(name)s %(levelname)s %(message)s')
|
||||
|
||||
filename = path.abspath(args.filename)
|
||||
|
||||
main1(filename, output_dir=args.output_dir)
|
|
@ -1,722 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from os import path
|
||||
import re
|
||||
import textwrap
|
||||
import xml.sax
|
||||
|
||||
import prettytable
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
SECTIONS = {'API_Versions': 'api-versions',
|
||||
'Database_Instances': 'database-instances',
|
||||
'admin-tenants': 'admin-tenants',
|
||||
'admin-tokens': 'admin-tokens',
|
||||
'admin-users': 'admin-users',
|
||||
'admin-versions': 'admin-versions',
|
||||
'alarms': 'alarms',
|
||||
'build-info': 'build-info',
|
||||
'capabilities': 'capabilities',
|
||||
'compute_extensions': 'extensions',
|
||||
'compute_flavors': 'flavors',
|
||||
'compute_image_metadata': 'image-metadata',
|
||||
'compute_images': 'images',
|
||||
'compute_limits': 'limits',
|
||||
'compute_server-actions': 'server-actions',
|
||||
'compute_server-addresses': 'server-addresses',
|
||||
'compute_server_metadata': 'server-metadata',
|
||||
'compute_servers': 'servers',
|
||||
'compute_versions': 'versions',
|
||||
'credentials-v3': 'credentials',
|
||||
'database-instance-actions': 'database-instance-actions',
|
||||
'databases': 'databases',
|
||||
'datastores': 'datastores',
|
||||
'diagnostics-v2.1': 'diagnostics',
|
||||
'domains-v3': 'domains',
|
||||
'endpoints-v3': 'endpoints',
|
||||
'ext-backups-v2': 'ext-backups',
|
||||
'extraroute-ext': 'extraroute',
|
||||
'flavor-extra-specs-v2.1': 'flavor-extra-specs',
|
||||
'flavors': 'flavors',
|
||||
'general-info': 'general-info',
|
||||
'groups-v3': 'groups',
|
||||
'heat-versions': 'heat-versions',
|
||||
'identity-auth-v2': 'identity-auth',
|
||||
'identity-v2-versions': 'versions',
|
||||
'identity_v3_OS-INHERIT-ext': 'inherit',
|
||||
'identity_v3_OS-KDS-ext': 'kds',
|
||||
'identity_v3_OS-OAUTH1-ext': 'oauth1',
|
||||
'identity_v3_OS-TRUST-ext': 'trust',
|
||||
'image-data-v2': 'image-data',
|
||||
'image-schemas-v2': 'image-schemas',
|
||||
'image-tags-v2': 'image-tags',
|
||||
'images-v1': 'images',
|
||||
'images-v2': 'images',
|
||||
'keypairs-v2.1': 'keypairs',
|
||||
'layer3': 'layer3',
|
||||
'lbaas-v1.0': 'lbaas',
|
||||
'lbaas-v2.0': 'lbaas',
|
||||
'limits-v2.1': 'limits',
|
||||
'members-v1': 'members',
|
||||
'members-v2': 'members',
|
||||
'metering-ext': 'metering',
|
||||
'meters': 'meters',
|
||||
'network_multi_provider-ext': 'network-multi-provider',
|
||||
'network_provider-ext': 'network-provider',
|
||||
'network_vlan_transparency-ext': 'network-vlan-transparency',
|
||||
'networks': 'networks',
|
||||
'neutron-versions-v2': 'versions',
|
||||
'neutron_extensions': 'extensions',
|
||||
'os-admin-actions': 'admin-actions',
|
||||
'os-admin-actions-v2.1': 'admin-actions',
|
||||
'os-admin-password-v2.1': 'admin-password',
|
||||
'os-agents': 'agents',
|
||||
'os-agents-v2.1': 'agents',
|
||||
'os-aggregates': 'aggregates',
|
||||
'os-aggregates-v2.1': 'aggregates',
|
||||
'os-availability-zone': 'availability-zone',
|
||||
'os-availability-zone-v2.1': 'availability-zone',
|
||||
'os-baremetal-ext-status': 'baremetal-ext-status',
|
||||
'os-block-device-mapping-v2-boot': 'block-device-mapping',
|
||||
'os-cells-v2.1': 'cells',
|
||||
'os-certificates': 'certificates',
|
||||
'os-certificates-v2.1': 'certificates',
|
||||
'os-cloudpipe': 'cloudpipe',
|
||||
'os-config-drive-v2.1': 'config-drive',
|
||||
'os-console-output': 'console-output',
|
||||
'os-console-output-v2.1': 'console-output',
|
||||
'os-consoles': 'consoles',
|
||||
'os-coverage': 'coverage',
|
||||
'os-create-backup-v2.1': 'create-backup',
|
||||
'os-createserverext': 'createserverext',
|
||||
'os-deferred-delete': 'deferred-delete',
|
||||
'os-deferred-delete-v2.1': 'deferred-delete',
|
||||
'os-diagnostics': 'diagnostics',
|
||||
'os-disk-config': 'disk-config',
|
||||
'os-evacuate-v2.1': 'evacuate',
|
||||
'os-ext-az': 'ext-az',
|
||||
'os-ext-img-size': 'ext-img-size',
|
||||
'os-ext-ips': 'ext-ips',
|
||||
'os-extended-availability-zone-v2.1': 'extended-availability-zone',
|
||||
'os-extended-networks': 'extended-networks',
|
||||
'os-extended-server-attributes': 'extended-server-attributes',
|
||||
'os-extended-server-attributes-v2.1': 'extended-server-attributes',
|
||||
'os-extended-status': 'extended-status',
|
||||
'os-extended-status-v2.1': 'extended-status',
|
||||
'os-fixed-ips': 'fixed-ips',
|
||||
'os-flavor-access': 'flavor-access',
|
||||
'os-flavor-access-v2.1': 'flavor-access',
|
||||
'os-flavor-extra-specs': 'flavor-extra-specs',
|
||||
'os-flavor-manage-v2.1': 'flavor-manage',
|
||||
'os-flavor-rxtx': 'flavor-rxtx',
|
||||
'os-flavor-rxtx-v2.1': 'flavor-rxtx',
|
||||
'os-flavor-swap': 'flavor-swap',
|
||||
'os-flavorextradata': 'flavorextradata',
|
||||
'os-flavormanage': 'flavormanage',
|
||||
'os-flavors-v2.1': 'flavors',
|
||||
'os-floating-ip-dns': 'floating-ip-dns',
|
||||
'os-floating-ip-dns-v2.1': 'floating-ip-dns',
|
||||
'os-floating-ip-pools': 'floating-ip-pools',
|
||||
'os-floating-ip-pools-v2.1': 'floating-ip-pools',
|
||||
'os-floating-ips': 'floating-ips',
|
||||
'os-floating-ips-bulk': 'floating-ips-bulk',
|
||||
'os-floating-ips-bulk-v2.1': 'floating-ips-bulk',
|
||||
'os-floating-ips-v2.1': 'floating-ips',
|
||||
'os-flv-disabled': 'flv-disabled',
|
||||
'os-hosts': 'hosts',
|
||||
'os-hosts-v2.1': 'hosts',
|
||||
'os-hypervisor-status': 'hypervisor-status',
|
||||
'os-hypervisors': 'hypervisors',
|
||||
'os-hypervisors-v2.1': 'hypervisors',
|
||||
'os-instance-actions': 'instance-actions',
|
||||
'os-instance-actions-v2.1': 'instance-actions',
|
||||
'os-instance-usage-audit-log-v2.1': 'instance-usage-audit-log',
|
||||
'os-interface': 'interface',
|
||||
'os-keypairs': 'keypairs',
|
||||
'os-ksadm-admin-ext': 'ksadm-admin',
|
||||
'os-kscatalog-ext': 'kscatalog',
|
||||
'os-ksec2-admin-ext': 'ksec2-admin',
|
||||
'os-kss3-admin-ext': 'kss3-admin',
|
||||
'os-ksvalidate-ext': 'ksvalidate',
|
||||
'os-limits-v2': 'limits',
|
||||
'os-metadef-namespace-v2': 'metadef-namespace',
|
||||
'os-metadef-object-v2': 'metadef-object',
|
||||
'os-metadef-property-v2.wadl': 'metadef-property',
|
||||
'os-metadef-resourcetype-v2': 'metadef-resourcetype',
|
||||
'os-metadef-schemas-v2': 'metadef-schemas',
|
||||
'os-metadef-tag-v2': 'metadef-tag',
|
||||
'os-migrations': 'migrations',
|
||||
'os-migrations-v2.1': 'migrations',
|
||||
'os-multi-server-create': 'multi-server-create',
|
||||
'os-multinic-v2.1': 'multinic',
|
||||
'os-multiple-create-v2.1': 'multiple-create',
|
||||
'os-networks': 'networks',
|
||||
'os-networks-v2.1': 'networks',
|
||||
'os-qos-v2-qos-specs': 'qos-v2-qos-specs',
|
||||
'os-quota-class-sets': 'quota-class-sets',
|
||||
'os-quota-class-sets-v2.1': 'quota-class-sets',
|
||||
'os-quota-sets': 'quota-sets',
|
||||
'os-quota-sets-v2': 'quota-sets',
|
||||
'os-quota-sets-v2.1': 'quota-sets',
|
||||
'os-remote-consoles-v2.1': 'remote-consoles',
|
||||
'os-rescue': 'rescue',
|
||||
'os-scheduler-hints': 'scheduler-hints',
|
||||
'os-security-group-default-rules': 'security-group-default-rules',
|
||||
'os-security-groups': 'security-groups',
|
||||
'os-security-groups-v2.1': 'security-groups',
|
||||
'os-server-OS-EXT-IPS-MAC': 'server-ext-ips-mac',
|
||||
'os-server-actions-v2.1': 'server-actions',
|
||||
'os-server-groups': 'server-groups',
|
||||
'os-server-groups-v2.1': 'server-groups',
|
||||
'os-server-password': 'server-password',
|
||||
'os-server-password-v2.1': 'server-password',
|
||||
'os-server-shelve': 'server-shelve',
|
||||
'os-server-start-stop': 'server-start-stop',
|
||||
'os-server-usage-v2.1': 'server-usage',
|
||||
'os-services': 'services',
|
||||
'os-services-v2.1': 'services',
|
||||
'os-shelve-v2.1': 'shelve',
|
||||
'os-simple-tenant-usage': 'simple-tenant-usage',
|
||||
'os-tenant-networks-v2.1': 'tenant-networks',
|
||||
'os-used-limits': 'used-limits',
|
||||
'os-used-limits-for-admins': 'used-limits-for-admins',
|
||||
'os-virtual-interfaces': 'virtual-interfaces',
|
||||
'os-virtual-interfaces-v2.1': 'virtual-interfaces',
|
||||
'os-volume': 'volume',
|
||||
'os-volume-manage-v2': 'volume-manage',
|
||||
'os-volume-type-access-v2': 'volume-type-access',
|
||||
'os-volume_attachments': 'volume-attachments',
|
||||
'policies-v3': 'policies',
|
||||
'port_binding-ext': 'port-binding',
|
||||
'ports': 'ports',
|
||||
'projects-v3': 'projects',
|
||||
'quotas-ext': 'quotas',
|
||||
'resources': 'resources',
|
||||
'roles-v3': 'roles',
|
||||
'samples': 'samples',
|
||||
'security_groups': 'security-groups',
|
||||
'server-ips-v2.1': 'server-ips',
|
||||
'service-catalog-v3': 'service-catalog',
|
||||
'service-status': 'service-status',
|
||||
'shared_images_v1': 'shared-images',
|
||||
'software-config': 'software-config',
|
||||
'stack-actions': 'stack-actions',
|
||||
'stack-events': 'stack-events',
|
||||
'stack-resources': 'stack-resources',
|
||||
'stack-templates': 'stack-templates',
|
||||
'stacks': 'stacks',
|
||||
'storage_account_services': 'storage-account-services',
|
||||
'storage_container_services': 'storage-container-services',
|
||||
'storage_object_services': 'storage-object-services',
|
||||
'subnets': 'subnets',
|
||||
'tokens-v3': 'tokens',
|
||||
'user_management': 'user-management',
|
||||
'users-v3': 'users',
|
||||
'v1.1clusters': 'clusters',
|
||||
'v1.1clustertemplate': 'clustertemplate',
|
||||
'v1.1datasources': 'datasources',
|
||||
'v1.1event-log': 'event-log',
|
||||
'v1.1imageregistry': 'imageregistry',
|
||||
'v1.1jobbinaries': 'jobbinaries',
|
||||
'v1.1jobbinary-internals': 'jobbinary-internals',
|
||||
'v1.1jobexecutions': 'job-executions',
|
||||
'v1.1jobs': 'jobs',
|
||||
'v1.1jobtypes': 'job-types',
|
||||
'v1.1nodegrouptemplate': 'node-group-template',
|
||||
'v1.1plugins': 'plugins',
|
||||
'v2.1os-fping': 'fping',
|
||||
'versions-identity-v3': 'versions-identity',
|
||||
'versions-images-v2': 'versions-images',
|
||||
'versions-v1': 'versions',
|
||||
'versions-v2.1': 'versions',
|
||||
'volume-api-v1-snapshots': 'snapshots',
|
||||
'volume-api-v1-types': 'types',
|
||||
'volume-api-v1-versions': 'versions',
|
||||
'volume-api-v1-volumes': 'volumes',
|
||||
'volume-api-v2-extensions': 'extensions',
|
||||
'volume-api-v2-snapshots': 'snapshots',
|
||||
'volume-api-v2-types': 'types',
|
||||
'volume-api-v2-versions': 'versions',
|
||||
'volume-api-v2-volumes': 'volumes',
|
||||
'vpnaas-v2.0': 'vpnaas'}
|
||||
|
||||
|
||||
VERSION_RE = re.compile('v[0-9\.]+')
|
||||
WHITESPACE_RE = re.compile('[\s]+', re.MULTILINE)
|
||||
TITLE_RE = re.compile(
|
||||
'(.*) API (v([\d.]+) )?(\S*)[ ]*\((SUPPORTED|CURRENT|DEPRECATED|\
|
||||
EXPERIMENTAL)\)')
|
||||
CAPTION_RE = re.compile('[*`]*')
|
||||
MARKUP_RE = re.compile('[.,:;)]+')
|
||||
|
||||
|
||||
class TableMixin(object):
|
||||
def visit_table(self, attrs):
|
||||
self.__table = prettytable.PrettyTable(hrules=prettytable.ALL)
|
||||
self.__table.header = False
|
||||
|
||||
def depart_table(self):
|
||||
self.content.append('\n\n')
|
||||
self.content.append(str(self.__table))
|
||||
self.content.append('\n\n')
|
||||
|
||||
def visit_caption(self, attrs):
|
||||
self.content_stack.append([])
|
||||
|
||||
def depart_caption(self):
|
||||
content = ''.join(self.content_stack.pop()).strip()
|
||||
content = CAPTION_RE.sub('', content)
|
||||
content = WHITESPACE_RE.sub(' ', content)
|
||||
content = '**' + content + '**'
|
||||
self.content.append(content)
|
||||
|
||||
def visit_th(self, attrs):
|
||||
self.__table.header = True
|
||||
|
||||
def depart_th(self):
|
||||
heading = self.content.pop().strip()
|
||||
self.__table.field_names.append(heading)
|
||||
self.__table.align[heading] = 'l'
|
||||
self.__table.valign[heading] = 't'
|
||||
self.__table.max_width[heading] = 80
|
||||
|
||||
def visit_tr(self, attrs):
|
||||
self.__row = []
|
||||
|
||||
def visit_td(self, attrs):
|
||||
self.content_stack.append([])
|
||||
|
||||
def depart_td(self):
|
||||
self.__row.append(''.join(self.content_stack.pop()).strip())
|
||||
|
||||
def depart_tr(self):
|
||||
if self.__row:
|
||||
columns = len(self.__table.field_names)
|
||||
self.__row.extend(['' for n in range(columns - len(self.__row))])
|
||||
self.__table.add_row(self.__row)
|
||||
|
||||
|
||||
class APIChapterContentHandler(xml.sax.ContentHandler, TableMixin):
|
||||
|
||||
EMPHASIS = {
|
||||
'bold': '**',
|
||||
'italic': '*'
|
||||
}
|
||||
|
||||
def __init__(self, filename, api_parser):
|
||||
self.filename = filename
|
||||
self.api_parser = api_parser
|
||||
|
||||
def startDocument(self):
|
||||
super(APIChapterContentHandler, self).startDocument()
|
||||
self.tags = {}
|
||||
self.current_tag = None
|
||||
|
||||
# general state
|
||||
self.tag_stack = []
|
||||
self.attr_stack = []
|
||||
self.content_stack = [[]]
|
||||
self.current_emphasis = None
|
||||
self.nesting = 0
|
||||
self.no_space = False
|
||||
self.fill_width = 67
|
||||
self.wrapper = textwrap.TextWrapper(width=self.fill_width)
|
||||
self.inline_markup_stack = []
|
||||
self.base_indent = ' '
|
||||
self.hyperlink_end = False
|
||||
self.markup_end = False
|
||||
|
||||
@property
|
||||
def content(self):
|
||||
return self.content_stack[-1]
|
||||
|
||||
def search_stack_for(self, tag_name):
|
||||
for tag, attrs in zip(reversed(self.tag_stack),
|
||||
reversed(self.attr_stack)):
|
||||
if tag == tag_name:
|
||||
return attrs
|
||||
|
||||
def on_top_tag_stack(self, *args):
|
||||
return self.tag_stack[-len(args):] == list(args)
|
||||
|
||||
def startElement(self, name, _attrs):
    """SAX hook: track open tags and harvest tag/resource/method maps.

    NOTE(review): formatting reconstructed from a whitespace-mangled
    source; statement grouping follows the visible token order.
    """
    attrs = dict(_attrs)

    self.tag_stack.append(name)
    self.attr_stack.append(attrs)

    # Push a fresh buffer for each container whose text endElement
    # will want to collect on its own.
    if self.on_top_tag_stack('chapter', 'section', 'title'):
        self.content_stack.append([])

    if self.on_top_tag_stack('chapter', 'title'):
        self.content_stack.append([])

    if self.on_top_tag_stack('chapter', 'section'):
        self.content_stack.append([])
        id = attrs['xml:id']
        # SECTIONS is a module-level alias map (defined outside this view).
        id = SECTIONS.get(id, id)
        self.current_tag = {'name': id}
        self.api_parser.tags.append(self.current_tag)

    if name == 'wadl:resource':
        # href is "<file>#<resource-id>"; record which tag owns it.
        filename, resource_id = attrs['href'].split("#")
        dir = path.dirname(self.filename)
        filepath = path.abspath(path.join(dir, filename))
        tag_name = '%s#%s' % (filepath, resource_id)
        self.api_parser.resource_tags[tag_name] = self.current_tag['name']

    if name == 'wadl:resources':
        if 'href' in attrs:
            dir = path.dirname(self.filename)
            filepath = path.abspath(path.join(dir, attrs['href']))
            self.api_parser.file_tags[filepath] = self.current_tag['name']

    if self.on_top_tag_stack('wadl:resource', 'wadl:method'):
        # Method key is "<abs wadl file path><#method-fragment>".
        resource = self.search_stack_for('wadl:resource')
        dir = path.dirname(self.filename)
        filename = resource['href'].split("#")[0]
        filepath = path.abspath(path.join(dir, filename))
        method_path = filepath + attrs['href']
        self.api_parser.method_tags[method_path] = self.current_tag['name']

    # Dynamic dispatch to a visit_<tag> handler when one is defined.
    fn = getattr(self, 'visit_%s' % name, None)
    if fn:
        fn(dict(_attrs))
|
||||
|
||||
def endElement(self, name):
    """SAX hook: harvest buffered text for titles/sections, then pop
    the tag stacks and dispatch to depart_<tag> if defined."""
    content = ''.join(self.content)

    if self.on_top_tag_stack('chapter', 'section', 'title'):
        self.current_tag['description'] = content.strip()
        self.content_stack.pop()

    if self.on_top_tag_stack('chapter', 'title'):
        title = content.strip()
        # TITLE_RE is module-level (outside this view); its groups are
        # unpacked as (title, version, version_num, ext, state) below.
        match = TITLE_RE.match(title)
        if match:
            title, version, version_num, ext, state = match.groups()
        else:
            raise Exception("Title %s doesn't match RE" % title)
        self.api_parser.title = ('%s %s' % (title, ext)).strip()
        self.content_stack.pop()

    if self.on_top_tag_stack('chapter', 'section'):
        self.current_tag['summary'] = content.strip()
        self.content_stack.pop()

    self.tag_stack.pop()
    self.attr_stack.pop()

    # depart_<tag> runs after the pops, so the stacks no longer contain
    # the element being closed.
    fn = getattr(self, 'depart_%s' % name, None)
    if fn:
        fn()
|
||||
|
||||
def characters(self, content):
    """SAX hook: normalise whitespace and append text to the current
    buffer (or to the inline-markup stack while inside markup).

    NOTE(review): the single-space literals below come from a
    whitespace-mangled source and may have been wider originally.
    """
    if not content:
        return
    # Fold up any white space into a single char
    if not self.on_top_tag_stack('programlisting'):
        content = WHITESPACE_RE.sub(' ', content)

    if content == ' ':
        return
    if content[0] == '\n':
        return
    if self.content:
        if self.content[-1].endswith('\n'):
            # Continuation of an indented block: re-indent to nesting.
            content = ' ' * self.nesting + content.strip()
        elif self.content[-1].endswith(' '):
            content = content.strip()
        elif (self.on_top_tag_stack('programlisting')):
            content = '\n' + ' ' * self.nesting + content
        elif self.no_space:
            content = '' + content.strip()
        elif self.hyperlink_end:
            # No space before trailing punctuation after a link.
            self.hyperlink_end = False
            if content == '.' or content == ':':
                pass
            else:
                content = ' ' + content.strip()
        elif self.markup_end:
            # MARKUP_RE is module-level (outside this view).
            self.markup_end = False
            if MARKUP_RE.match(content):
                pass
            else:
                content = ' ' + content.strip()
        else:
            content = ' ' + content.strip()

    if self.no_space is True:
        # Inside inline markup: buffer for the depart_* handler.
        self.inline_markup_stack.append(content)
    else:
        self.content.append(content)
|
||||
|
||||
def visit_listitem(self, attrs):
    """Open a list item: compute nesting depth, push a '-' bullet
    buffer, and install a wrapper indented for this depth.

    NOTE(review): indent-string widths reconstructed from a
    whitespace-mangled source; confirm against the original.
    """
    # Depth = number of listitem ancestors (0-based).
    self.nesting = len([tag for tag in self.tag_stack
                        if tag == 'listitem']) - 1
    if self.nesting > 0:
        prev_nesting = self.nesting - 1
        self.base_indent = ' ' * prev_nesting + ' '
    else:
        self.base_indent = ' '

    self.content_stack.append([self.base_indent * self.nesting + '-'])
    self.wrapper = textwrap.TextWrapper(
        width=self.fill_width,
        initial_indent=' ',
        subsequent_indent=self.base_indent * self.nesting + ' ',)
|
||||
|
||||
def depart_listitem(self):
    """Close a list item: flush its buffer into the parent buffer and
    recompute nesting/indent for the enclosing list.

    NOTE(review): indent-string widths reconstructed from a
    whitespace-mangled source; confirm against the original.
    """
    content = self.content_stack.pop()
    self.content.append(''.join(content))
    # Ensure the item ends with at least one newline.
    if self.content[-1].endswith('\n\n'):
        pass
    else:
        self.content.append('\n')

    self.nesting = len([tag for tag in self.tag_stack
                        if tag == 'listitem']) - 1
    if self.nesting > 0:
        prev_nesting = self.nesting - 1
        self.base_indent = ' ' * prev_nesting + ' '
    else:
        self.base_indent = ' '
|
||||
|
||||
def depart_itemizedlist(self):
    """Close a bullet list: reset to an unindented wrapper at top
    level, otherwise keep wrapping at the enclosing list's indent."""
    if self.search_stack_for('itemizedlist') is None:
        self.wrapper = textwrap.TextWrapper(width=self.fill_width)
    else:
        # NOTE(review): indent strings reconstructed from mangled
        # whitespace; widths may differ from the original.
        self.wrapper = textwrap.TextWrapper(
            width=self.fill_width,
            initial_indent=self.base_indent * self.nesting + ' ',
            subsequent_indent=self.base_indent * self.nesting + ' ',)
|
||||
|
||||
def depart_orderedlist(self):
    # NOTE(review): this checks 'itemizedlist', not 'orderedlist' --
    # possibly intentional (shared nesting state across list kinds),
    # possibly a copy-paste slip from depart_itemizedlist; confirm
    # before changing.
    if self.search_stack_for('itemizedlist') is None:
        self.wrapper = textwrap.TextWrapper(width=self.fill_width)
|
||||
|
||||
def visit_para(self, attrs):
    """Open a paragraph: push a fresh text buffer; outside lists, try
    to ensure a blank line separates it from prior content."""
    self.content_stack.append([''])
    if self.search_stack_for('itemizedlist') is not None:
        return
    # NOTE(review): after the push above, self.content is the new
    # buffer [''], so the endswith checks below inspect that buffer,
    # not the parent one -- confirm this is the intended behavior.
    if self.content:
        if self.content[-1].endswith('\n\n'):
            pass
        elif self.content[-1].endswith('\n'):
            self.content.append('\n')
|
||||
|
||||
def depart_para(self):
    """Close a paragraph: wrap the buffered text, emit it followed by a
    blank line, and restore the list-aware wrapper when inside a list."""
    content = ''.join(self.content_stack.pop()).strip()
    wrapped = self.wrapper.wrap(content)
    self.content.append('\n'.join(wrapped))
    self.content.append('\n\n')

    if self.search_stack_for('itemizedlist') is None:
        pass
    else:
        # NOTE(review): indent strings reconstructed from mangled
        # whitespace; widths may differ from the original.
        self.wrapper = textwrap.TextWrapper(
            width=self.fill_width,
            initial_indent=self.base_indent * self.nesting + ' ',
            subsequent_indent=self.base_indent * self.nesting + ' ',)
|
||||
|
||||
def visit_code(self, attrs):
    """Open inline code: divert character data to the markup stack."""
    self.no_space = True
|
||||
|
||||
def depart_code(self):
    """Close inline code: wrap gathered text in RST double backticks."""
    buf = self.content
    # No leading space directly after '(' or an existing space.
    glue_to_previous = bool(buf) and (buf[-1].endswith('(')
                                      or buf[-1].endswith(' '))
    prefix = '``' if glue_to_previous else ' ``'
    text = ' '.join(self.inline_markup_stack)
    buf.append(prefix + text + '``')
    self.inline_markup_stack[:] = []
    self.no_space = False
    self.markup_end = True
|
||||
|
||||
def visit_emphasis(self, attrs):
    """Open emphasis; the role attribute selects the marker."""
    # Bold is the default emphasis role.
    role = attrs.get('role', 'bold')
    self.current_emphasis = role
    self.no_space = True
|
||||
|
||||
def depart_emphasis(self):
    """Close emphasis: wrap gathered text in the matching RST marker
    from the class-level EMPHASIS map."""
    marker = self.EMPHASIS[self.current_emphasis]
    buf = self.content
    # No leading space directly after '(' or an existing space.
    glue = bool(buf) and (buf[-1].endswith('(') or buf[-1].endswith(' '))
    lead = '' if glue else ' '
    text = ' '.join(self.inline_markup_stack)
    buf.append(lead + marker + text + marker)
    self.inline_markup_stack[:] = []
    self.no_space = False
    self.current_emphasis = None
    self.markup_end = True
|
||||
|
||||
def visit_programlisting(self, attrs):
    """Open a literal block: '::' when no attrs, otherwise a
    code-block directive for the given language; body indents by 3."""
    if attrs:
        directive = '.. code-block:: %s\n\n' % attrs['language']
    else:
        directive = '::\n\n'
    self.content.append(directive)
    self.nesting = 3
|
||||
|
||||
def depart_programlisting(self):
    """Close a literal block and append a trailing blank line."""
    self.nesting = 0  # no indent for blank lines
    self.content.append('\n\n')
|
||||
|
||||
def visit_link(self, attrs):
    """Open a hyperlink: stash the href for depart_link and divert
    character data to the markup stack."""
    if attrs:
        href = attrs['xlink:href']
        self.inline_markup_stack.append(href)
    self.no_space = True
|
||||
|
||||
def depart_link(self):
    """Close a hyperlink: emit RST link markup from the markup stack.

    The first stacked item is the href; any further items form the
    link text. A bare href becomes an anonymous `<url>`__ reference.

    Fix: use '== 1' instead of identity 'is 1' on an int literal
    (implementation-dependent; SyntaxWarning on modern Python).
    """
    content = ' `'
    # anonymous link
    if len(self.inline_markup_stack) == 1:
        content += ('<%s>`__' % self.inline_markup_stack[0])
    else:
        content += ' '.join(self.inline_markup_stack[1:None])
        content += (' <%s>`_' % self.inline_markup_stack[0])

    self.content.append(content)
    self.inline_markup_stack[:] = []
    self.no_space = False
    self.hyperlink_end = True
|
||||
|
||||
|
||||
class APIRefContentHandler(xml.sax.ContentHandler):
    """SAX handler for an api-ref DocBook <book>.

    Derives service/version from the book's xml:id and parses each
    chapter <xi:include> with APIChapterContentHandler (defined
    elsewhere in this module), which writes its results back into this
    object via its api_parser reference.
    """

    def __init__(self, filename):
        # Path of the book file; used to resolve relative includes.
        self.filename = filename

    def startDocument(self):
        # Accumulators filled in by the chapter handlers.
        self.tags = []
        self.current_tag = None
        self.method_tags = {}
        self.resource_tags = {}
        self.file_tags = {}

        # general state
        self.tag_stack = []
        self.attr_stack = []
        self.content = None

    def search_stack_for(self, tag_name):
        # Attrs of the innermost open element with this name, else None.
        for tag, attrs in zip(reversed(self.tag_stack),
                              reversed(self.attr_stack)):
            if tag == tag_name:
                return attrs

    def on_top_tag_stack(self, *args):
        # True when the trailing open tags equal args, in order.
        return self.tag_stack[-len(args):] == list(args)

    def startElement(self, name, _attrs):
        attrs = dict(_attrs)
        self.tag_stack.append(name)
        self.attr_stack.append(attrs)
        self.content = []
        if self.on_top_tag_stack('book'):
            # Book id pattern observed here:
            # [api.openstack.org-]<service>-<version>[-ext]
            id = attrs['xml:id']
            extensions = False
            if id.endswith('-ext'):
                extensions = True
                id = id.rsplit('-', 1)[0]
            service, version = id.rsplit('-', 1)
            if service.startswith('api.openstack.org-'):
                service = service.split('-', 1)[1]
            if extensions:
                service = service + '-extensions'
            # VERSION_RE is module-level (defined outside this view).
            assert VERSION_RE.match(version)
            self.service = service
            self.version = version
        if self.on_top_tag_stack('book', 'xi:include'):
            # Parse each included chapter with this object as the
            # shared accumulator.
            filename = attrs['href']
            dir = path.dirname(self.filename)
            filepath = path.join(dir, filename)
            ch = APIChapterContentHandler(filepath, self)
            xml.sax.parse(filepath, ch)

    def endElement(self, name):
        self.tag_stack.pop()
        self.attr_stack.pop()

    def characters(self, content):
        content = content.strip()
        if content:
            self.content.append(content)
|
||||
|
||||
|
||||
def main1(source_file, output_dir):
    """Parse a WADL api-ref book and dump its tag metadata as
    api-ref-<service>-<version>.json in output_dir."""
    log.info('Parsing %s' % source_file)
    ch = APIRefContentHandler(source_file)
    xml.sax.parse(source_file, ch)
    # NOTE(review): chdir is process-global; the relative open() below
    # depends on it.
    os.chdir(output_dir)
    output = {
        'title': ch.title,
        'service': ch.service,
        'version': ch.version,
        'tags': ch.tags,
        'method_tags': ch.method_tags,
        'file_tags': ch.file_tags,
        'resource_tags': ch.resource_tags,
    }
    pathname = 'api-ref-%s-%s.json' % (ch.service,
                                       ch.version)
    with open(pathname, 'w') as out_file:
        json.dump(output, out_file, indent=2, sort_keys=True)
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse arguments, configure logging, run main1."""
    import argparse

    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '-v', '--verbose', action='count', default=0,
        help="Increase verbosity (specify multiple times for more)")
    parser.add_argument(
        '-o', '--output-dir', action='store',
        help="The directory to output the JSON files too.")
    parser.add_argument(
        'filename',
        help="File to convert")

    args = parser.parse_args()

    # -v = INFO, -vv (or more) = DEBUG; default is WARNING.
    log_level = logging.WARNING
    if args.verbose == 1:
        log_level = logging.INFO
    elif args.verbose >= 2:
        log_level = logging.DEBUG

    logging.basicConfig(
        level=log_level,
        format='%(asctime)s %(name)s %(levelname)s %(message)s')

    filename = path.abspath(args.filename)

    main1(filename, output_dir=args.output_dir)
|
|
@ -1,482 +0,0 @@
|
|||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import codecs
|
||||
import docutils.core
|
||||
from docutils import nodes
|
||||
import docutils.parsers.rst
|
||||
import docutils.utils
|
||||
from docutils import writers
|
||||
import json
|
||||
import logging
|
||||
from os import path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class JSONTranslator(nodes.SparseNodeVisitor):
    """Render a docutils document tree to markdown-flavoured text.

    'first_row' > 0 means we are inside a table; table cell text is
    staged on 'table_stack' and lists inside cells are emitted as HTML
    snippets. depart_document exposes the accumulated text as .output.

    Fixes: replaced all identity comparisons on int literals
    ('is 0' / 'is 1') with '==' -- int identity is
    implementation-dependent and warns on modern Python.

    NOTE(review): formatting reconstructed from a whitespace-mangled
    source; literal indent strings may originally have been wider.
    """

    def __init__(self, document):
        nodes.NodeVisitor.__init__(self, document)
        self.output = {}
        self.node_stack = []
        self.node_stack.append(self.output)
        self.current_node_name = None
        self.bullet_stack = []    # one '*' per open bullet list
        self.table_stack = []     # staged cell text while inside tables
        self.text = ''            # accumulated rendered output
        self.col_num = 0
        self.first_row = 0        # row counter of the current table
        self.hyperlink_name = ''
        self.refuri = ''
        self.listitem = False
        self.lit_block = False
        self.list_indent = 0
        self.text_res_desc = ''

    def visit_document(self, node):
        self.text = ''

    def depart_document(self, node):
        # Trim one trailing blank line, then expose the rendered text.
        if self.text.endswith('\n\n'):
            self.text = self.text[:-2]
        self.output = self.text

    def default_visit(self, node):
        """Default node visit method."""
        self.current_node_name = node.__class__.__name__
        if hasattr(node, 'children') and node.children:
            new_node = {}
            self.node_stack[-1][self.current_node_name] = new_node
            self.node_stack.append(new_node)

    def default_departure(self, node):
        """Default node depart method."""
        if hasattr(node, 'children') and node.children:
            self.node_stack.pop()

    def visit_system_message(self, node):
        pass

    def depart_system_message(self, node):
        pass

    def visit_Text(self, node):
        if self.first_row == 0:
            if self.lit_block and len(self.bullet_stack) > 0:
                # Re-indent literal-block lines inside a list item.
                litblock = node.astext().split('\n')
                litblock = '\n '.join(litblock)
                self.text += litblock
            else:
                self.text += node.astext()

    def depart_Text(self, node):
        pass

    def visit_emphasis(self, node):
        if self.first_row > 0:
            # Inside a table cell: splice markers into the staged text.
            inlinetxt = self.table_stack.pop()
            para = inlinetxt.partition(node.astext())
            new_para = ''
            new_para += para[0] + '_' + para[1] + '_' + para[2]
            self.table_stack.append(new_para)
        else:
            self.text += '_'

    def depart_emphasis(self, node):
        if self.first_row == 0:
            self.text += '_'

    def visit_literal(self, node):
        if self.first_row > 0:
            inlinetxt = self.table_stack.pop()
            para = inlinetxt.partition(node.astext())
            new_para = ''
            new_para += para[0] + '`' + para[1] + '`' + para[2]
            self.table_stack.append(new_para)
        else:
            self.text += '`'

    def depart_literal(self, node):
        if self.first_row == 0:
            self.text += '`'

    def visit_strong(self, node):
        if self.first_row > 0:
            inlinetxt = self.table_stack.pop()
            para = inlinetxt.partition(node.astext())
            new_para = ''
            new_para += para[0] + '**' + para[1] + '**' + para[2]
            self.table_stack.append(new_para)
        else:
            self.text += '**'

    def depart_strong(self, node):
        if self.first_row == 0:
            self.text += '**'

    def visit_literal_block(self, node):
        if len(self.bullet_stack) > 0:
            self.text += '\n '
        else:
            self.text += '```\n'
        self.lit_block = True

    def depart_literal_block(self, node):
        if len(self.bullet_stack) > 0:
            self.text += '\n'
        else:
            self.text += '\n```\n'
        self.lit_block = False

    def visit_bullet_list(self, node):
        if self.first_row > 0:
            self.text += """<ul>"""
        else:
            self.bullet_stack.append('*')

    def depart_bullet_list(self, node):
        if self.first_row > 0:
            self.text += """</ul>"""
        else:
            self.bullet_stack.pop()
            self.list_indent = len(self.bullet_stack) - 1
            if len(self.bullet_stack) == 0:
                self.text += '\n'

    def visit_list_item(self, node):
        if self.first_row > 0:
            self.text += """<li>"""
        else:
            self.list_indent = len(self.bullet_stack) - 1
            item = '\n%s%s ' % (' ' * self.list_indent,
                                self.bullet_stack[-1])
            self.text += item
            self.listitem = True

    def depart_list_item(self, node):
        if self.first_row > 0:
            self.text += """</li>"""
        else:
            self.listitem = False
            self.list_indent = 0

    def visit_title(self, node):
        self.current_node_name = node.__class__.__name__
        if self.current_node_name not in self.node_stack[-1]:
            new_node = []
            self.node_stack[-1][self.current_node_name] = new_node
            self.node_stack.append(new_node)

    def depart_title(self, node):
        self.node_stack.pop()

    def visit_paragraph(self, node):
        if self.first_row > 0:
            self.table_stack.append(node.astext())
        else:
            # listitem text
            if self.listitem is True:
                pass
            else:
                # another para in listitem
                if len(self.bullet_stack) > 0:
                    if self.lit_block:
                        self.text += '\n' + ' '
                    else:
                        self.text += '\n' + ' ' * self.list_indent + ' '

    def depart_paragraph(self, node):
        if self.first_row == 0:
            if self.listitem:
                self.text += '\n'
                self.listitem = False
            else:
                if len(self.bullet_stack) > 0:
                    self.text += "\n"
                else:
                    # default paragraph
                    self.text += "\n\n"
        else:
            if self.first_row > 0:
                para = self.table_stack.pop()
                para = para.strip('\n')
                plist = para.split('\n')

                # multi-line text in single column
                if len(plist) > 0:
                    self.text += """<br>""".join(plist)
                else:
                    self.text += para

    def visit_line_block(self, node):
        if isinstance(self.node_stack[-1], list):
            return

        self.current_node_name = node.__class__.__name__
        if self.current_node_name not in self.node_stack[-1]:
            new_node = []
            self.node_stack[-1][self.current_node_name] = new_node
            self.node_stack.append(new_node)
        else:
            self.node_stack.append(self.node_stack[-1][self.current_node_name])

    def depart_line_block(self, node):
        if isinstance(self.node_stack[-1], list):
            self.node_stack.pop()

    def visit_table(self, node):
        self.col_num = 0

    def depart_table(self, node):
        self.text += "\n"

    def visit_tbody(self, node):
        pass

    def depart_tbody(self, node):
        self.text += "\n"
        self.first_row = 0
        self.col_num = 0

    def visit_thead(self, node):
        pass

    def depart_thead(self, node):
        pass

    def visit_tgroup(self, node):
        pass

    def depart_tgroup(self, node):
        pass

    def visit_colspec(self, node):
        pass

    def depart_colspec(self, node):
        pass

    def visit_row(self, node):
        # After the header row, emit the markdown '| --- |' separator.
        if self.first_row == 1 and self.col_num > 0:
            row_separator = [' --- '] * self.col_num
            self.text += "|"
            sep_row = "|".join(row_separator)
            self.text += sep_row
            self.text += "|"
            self.text += "\n"

        self.text += "|"
        self.first_row += 1

    def depart_row(self, node):
        self.text += "\n"

    def visit_entry(self, node):
        self.text += " "

    def depart_entry(self, node):
        self.text += " |"
        self.col_num += 1

    def visit_definition(self, node):
        pass

    def depart_definition(self, node):
        pass

    def visit_definition_list(self, node):
        pass

    def depart_definition_list(self, node):
        pass

    def visit_definition_list_item(self, node):
        pass

    def depart_definition_list_item(self, node):
        pass

    def visit_term(self, node):
        self.text += " "
        if self.first_row == 0:
            self.text += node.astext()
        else:
            self.table_stack.append(node.astext())

    def depart_term(self, node):
        if self.first_row > 0:
            self.text += self.table_stack.pop()
            self.text += """<br>"""

    def visit_reference(self, node):
        # NOTE(review): assumes 'name' and 'refuri' attributes are
        # always present on reference nodes -- confirm.
        self.hyperlink_name = node.attributes['name']
        self.refuri = node.attributes['refuri']
        self.text += '['

    def depart_reference(self, node):
        if self.hyperlink_name:
            self.text += ']'
            self.text += '(' + self.refuri + ')'
        else:
            self.text += '[' + self.refuri + ']'

        self.hyperlink_name = ''
        self.refuri = ''
|
||||
|
||||
|
||||
class JSONWriter(writers.Writer):
    """docutils Writer wrapper around JSONTranslator.

    Despite the name, .output ends up as the translator's rendered text
    string (see JSONTranslator.depart_document).
    """

    supported = ('json',)
    """Formats this writer supports."""

    settings_spec = (
        '"Docutils JSON" Writer Options',
        None,
        [])

    config_section = 'docutils_json writer'
    config_section_dependencies = ('writers',)

    output = None

    def __init__(self):
        writers.Writer.__init__(self)
        self.translator_class = JSONTranslator

    def set_doc(self, doc):
        # Caller supplies an already-parsed document directly.
        self.document = doc

    def translate(self):
        self.visitor = visitor = self.translator_class(self.document)
        self.document.walkabout(visitor)
        self.output = visitor.output
|
||||
|
||||
|
||||
class error_writer(object):
    """File-like sink that routes docutils warning output to the
    module logger."""

    def write(self, line):
        message = line.strip()
        logger.warning(message)
|
||||
|
||||
|
||||
def publish_string(string):
    """Parse an RST string and render it through JSONWriter.

    NOTE(review): relies on docutils.frontend being reachable via the
    docutils imports above -- it is not imported explicitly; confirm.
    """
    optionP = docutils.frontend.OptionParser(
        components=(docutils.parsers.rst.Parser,))
    values = optionP.get_default_values()
    values.update({'output_encoding': 'unicode'}, optionP)
    document = docutils.utils.new_document(string, settings=values)
    # 'parser' is the module-level RST parser instance defined below.
    parser.parse(string, document)
    settings_overrides = {'warning_stream': error_writer(),
                          'output_encoding': 'unicode'}
    json_writer = JSONWriter()
    json_writer.set_doc(document)

    return docutils.core.publish_string(
        string, writer=json_writer,
        settings_overrides=settings_overrides)
|
||||
|
||||
|
||||
parser = docutils.parsers.rst.Parser()
|
||||
|
||||
|
||||
def main1(filename, output_dir):
    """Load a swagger JSON file and convert its RST fields to markdown.

    Fix: open the input file with a context manager so the handle is
    closed deterministically (the original leaked it).
    """
    logger.info('Loading %s' % filename)
    with open(filename) as swagger_file:
        swagger = json.load(swagger_file)
    write_md(swagger, output_dir)
|
||||
|
||||
|
||||
def write_md(swagger, output_dir):
    """Convert every RST description field in the swagger dict in place
    (via publish_string), then dump the result as
    <service>-<version>-swagger-md.json in output_dir."""
    info = swagger['info']
    version = info['version']
    service = info['x-service']
    output_file = '%s-%s-swagger-md.json' % (service, version)
    filepath = path.join(output_dir, output_file)
    logger.info('Output file: %s' % filepath)

    # convert tag x-summary
    for tag in swagger['tags']:
        new_desc = publish_string(tag['x-summary'])
        tag['x-summary'] = new_desc

    for paths, methods in swagger['paths'].items():
        for method, val in methods.items():
            # convert method description
            new_desc = publish_string(val['description'])
            val['description'] = new_desc

            # convert method summary
            if 'summary' in val:
                new_desc = publish_string(val['summary'])
                val['summary'] = new_desc

            # convert method x-title
            new_desc = publish_string(val['x-title'])
            val['x-title'] = new_desc

            # convert operation parameter descriptions
            for p in val['parameters']:
                new_desc = publish_string(p['description'])
                p['description'] = new_desc

            # convert response header descriptions
            for p, values in val['responses'].items():
                if 'headers' in values:
                    headers = values['headers']
                    for hkey, hval in headers.items():
                        new_desc = publish_string(hval['description'])
                        hval['description'] = new_desc

    # convert property descriptions in definitions
    for ops, vals in swagger['definitions'].items():
        for k, props in vals['properties'].items():
            new_desc = publish_string(props['description'])
            props['description'] = new_desc

    with codecs.open(filepath,
                     'w', "utf-8") as out_file:
        json.dump(swagger, out_file, indent=2, sort_keys=True)
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse arguments, configure logging, run main1."""
    import argparse

    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '-v', '--verbose', action='count', default=0,
        help="Increase verbosity (specify multiple times for more)")
    parser.add_argument(
        '-o', '--output-dir', action='store',
        help="The directory to output the JSON files too.")
    parser.add_argument(
        'filename',
        help="File to convert")

    args = parser.parse_args()

    # -v = INFO, -vv (or more) = DEBUG; default is WARNING.
    log_level = logging.WARNING
    if args.verbose == 1:
        log_level = logging.INFO
    elif args.verbose >= 2:
        log_level = logging.DEBUG

    logging.basicConfig(
        level=log_level,
        format='%(asctime)s %(name)s %(levelname)s %(message)s')

    filename = path.abspath(args.filename)

    main1(filename, output_dir=args.output_dir)
|
|
@ -1,319 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import codecs
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from os import path
|
||||
import textwrap
|
||||
|
||||
from jinja2 import Environment
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
TMPL_TAG = """
|
||||
{%- for tag in swagger.tags -%}
|
||||
|
||||
.. swagger:tag:: {{tag.name}}
|
||||
:synopsis: {{tag.description}}
|
||||
{% for line in tag['x-summary'].split('\n') %}
|
||||
{{line}}
|
||||
{%- endfor %}
|
||||
|
||||
{% endfor %}
|
||||
"""
|
||||
|
||||
TMPL_API = """
|
||||
{%- for path, methods in swagger['paths'].items() -%}
|
||||
{%- for method_name, request in methods.items() -%}
|
||||
|
||||
{%- if method_name.startswith('x-') -%}
|
||||
.. http:{{ method_name|remove_prefix(request['operationId']) }}:: {{path}}
|
||||
{%- else %}
|
||||
.. http:{{method_name}}:: {{path}}
|
||||
{%- endif %}
|
||||
:title: {{request['x-title']}}
|
||||
:synopsis: {{request['summary']}}
|
||||
{%- if request.description != '' %}
|
||||
{% for line in request.description.split('\n') %}
|
||||
{{line}}
|
||||
{%- endfor %}
|
||||
{%- endif %}
|
||||
{% for status_code, response in request.responses.items() %}
|
||||
{%- if response['examples']['application/json'] %}
|
||||
:responseexample {{status_code}}: {{version}}/examples/{{request['operationId']}}_resp_{{status_code}}.json
|
||||
{%- endif -%}
|
||||
{%- if response['examples']['text/plain'] %}
|
||||
:responseexample {{status_code}}: {{version}}/examples/{{request['operationId']}}_resp_{{status_code}}.txt
|
||||
{%- endif -%}
|
||||
{%- if response['schema']['$ref'] %}
|
||||
:responseschema {{status_code}}: {{version}}/{{response['schema']['$ref'].rsplit('/', 1)[1]}}.json
|
||||
{%- endif -%}
|
||||
{% endfor -%}
|
||||
{% for mime in request.consumes %}
|
||||
:accepts: {{mime}}
|
||||
{%- endfor -%}
|
||||
{% for mime in request.produces %}
|
||||
:produces: {{mime}}
|
||||
{%- endfor -%}
|
||||
{% for tag in request.tags %}
|
||||
:tag: {{tag}}
|
||||
{%- endfor -%}
|
||||
{% for parameter in request.parameters -%}
|
||||
{% if parameter.in == 'body' -%}
|
||||
{% if parameter.schema %}
|
||||
:requestschema: {{version}}/{{request['operationId']}}.json
|
||||
{%- for id, schema in swagger['definitions'].items() -%}
|
||||
{%- if id == request['operationId'] -%}
|
||||
{%- if 'example' in schema -%}
|
||||
{%- if schema['example']['application/json'] %}
|
||||
:requestexample: {{version}}/examples/{{request['operationId']}}_req.json
|
||||
{%- endif -%}
|
||||
{%- if schema['example']['text/plain'] %}
|
||||
:requestexample: {{version}}/examples/{{request['operationId']}}_req.txt
|
||||
{%- endif -%}
|
||||
{%- endif -%}
|
||||
{%- endif -%}
|
||||
{%- endfor -%}
|
||||
{%- endif -%}
|
||||
{%- elif parameter.in == 'path' %}
|
||||
{{ parameter|format_param('parameter') }}
|
||||
{%- elif parameter.in == 'query' %}
|
||||
{{ parameter|format_param('query') }}
|
||||
{%- elif parameter.in == 'header' %}
|
||||
{{ parameter|format_param('reqheader') }}
|
||||
{%- endif %}
|
||||
{%- endfor -%}
|
||||
{% for status_code, response in request['responses'].items() %}
|
||||
:statuscode {{status_code}}: {{response.description}}
|
||||
{%- endfor %}
|
||||
|
||||
|
||||
{% endfor %}
|
||||
{%- endfor %}
|
||||
""" # noqa
|
||||
|
||||
environment = Environment()
|
||||
|
||||
|
||||
def format_param(obj, type='query'):
    """Format one swagger parameter as an RST field-list entry.

    :param obj: dict with at least 'name' and 'description' keys.
    :param type: RST field role ('query', 'parameter', 'reqheader', ...).
    :returns: the formatted, line-wrapped field text.

    Fix: use 'i == 0' instead of identity 'i is 0' (int identity is
    implementation-dependent; SyntaxWarning on modern Python).
    """
    param = ' :%s %s: ' % (type, obj['name'])
    param_wrap = textwrap.TextWrapper(
        initial_indent=param,
        subsequent_indent=' ' * len(param))
    if ('::\n\n' in obj['description']) or \
       ('.. code-block::' in obj['description']):
        # Literal blocks must keep their own line structure: indent
        # each continuation line under the field marker instead of
        # re-wrapping the text.
        param_len = len(param)
        for i, line in enumerate(obj['description'].split('\n')):
            if i == 0:
                param += line + '\n'
            else:
                param += ' ' * param_len + line + '\n'
        return param
    else:
        new_text = param_wrap.wrap(obj['description'])
        return '\n'.join(new_text)
|
||||
|
||||
|
||||
def remove_prefix(obj, op_name):
    """Strip the leading 'x-' from a method key; when the remainder
    ends with '-<op_name>', drop that suffix too."""
    stripped = obj[2:]
    if stripped.endswith(op_name):
        return stripped[:-(len(op_name) + 1)]
    else:
        return stripped
|
||||
|
||||
environment.filters['format_param'] = format_param
|
||||
environment.filters['remove_prefix'] = remove_prefix
|
||||
|
||||
|
||||
def main1(filename, output_dir):
    """Load a swagger JSON file and write out RST pages, JSON schemas,
    examples, and the index.

    Fix: open the input file with a context manager so the handle is
    closed deterministically (the original leaked it).
    """
    log.info('Parsing %s' % filename)
    with open(filename) as swagger_file:
        swagger = json.load(swagger_file)
    write_rst(swagger, output_dir)
    write_jsonschema(swagger, output_dir)
    write_examples(swagger, output_dir)
    write_index(swagger, output_dir)
|
||||
|
||||
|
||||
def write_index(swagger, output_dir):
    """Merge this service/version's info block into index.json.

    Mutates swagger['info'] in place: renames the 'x-service' key to
    'service'. Existing index entries for other services are kept.

    Fix: read the existing index through a context manager so the
    handle is closed deterministically (the original leaked it).
    """
    info = swagger['info']
    service = info['x-service']

    # Web UI uses 'service' field for indexing
    del info['x-service']
    info['service'] = service
    version = info['version']
    output_file = 'index.json'
    filepath = path.join(output_dir, output_file)
    log.info("Writing APIs %s", filepath)
    if path.exists(filepath):
        with open(filepath) as index_file:
            index = json.load(index_file)
    else:
        index = {}
    # Key shape: "<service>/<version>/"
    index['/'.join([service, version, ''])] = info
    with codecs.open(filepath,
                     'w', "utf-8") as out_file:
        json.dump(index, out_file, indent=2)
|
||||
|
||||
|
||||
def write_rst(swagger, output_dir):
    """Render the swagger dict to RST: one API page and one tags page."""
    # Attach the info block to the Jinja environment for the templates.
    environment.extend(swagger_info=swagger['info'])
    write_apis(swagger, output_dir)
    write_tags(swagger, output_dir)
|
||||
|
||||
|
||||
def write_apis(swagger, output_dir):
    """Render TMPL_API to <output_dir>/<service>/<version>.rst."""
    info = swagger['info']
    version = info['version']
    service = info['x-service']
    service_path = path.join(output_dir, service)
    output_file = '%s.rst' % version
    if not path.exists(service_path):
        os.makedirs(service_path)
    TMPL = environment.from_string(TMPL_API)
    result = TMPL.render(swagger=swagger,
                         version=swagger['info']['version'])
    filepath = path.join(service_path, output_file)
    log.info("Writing APIs %s", filepath)
    with codecs.open(filepath,
                     'w', "utf-8") as out_file:
        out_file.write(result)
|
||||
|
||||
|
||||
def write_tags(swagger, output_dir):
    """Render TMPL_TAG to <output_dir>/<service>/<version>-tags.rst."""
    info = swagger['info']
    version = info['version']
    service = info['x-service']
    service_path = path.join(output_dir, service)
    if not path.exists(service_path):
        os.makedirs(service_path)
    output_file = '%s-tags.rst' % version
    TMPL = environment.from_string(TMPL_TAG)
    result = TMPL.render(swagger=swagger,
                         version=swagger['info']['version'])
    filepath = path.join(service_path, output_file)
    log.info("Writing Tags %s", filepath)
    with codecs.open(filepath,
                     'w', "utf-8") as out_file:
        out_file.write(result)
|
||||
|
||||
|
||||
def write_jsonschema(swagger, output_dir):
    """Dump every schema in ``swagger['definitions']`` to its own file.

    Each schema is written to
    ``<output_dir>/<service>/<version>/<schema_name>.json``.
    """
    info = swagger['info']
    version = info['version']
    service = info['x-service']
    full_path = path.join(output_dir, service, version)
    # makedirs creates intermediate directories, so one call covers both
    # the service and version levels (the original created each level
    # separately).
    if not path.exists(full_path):
        os.makedirs(full_path)

    for schema_name, schema in swagger['definitions'].items():
        filepath = path.join(full_path, '%s.json' % schema_name)
        log.info("Writing %s", filepath)
        # Fix: the original leaked the file handle (and shadowed the
        # ``file`` builtin); a context manager guarantees the dump is
        # flushed and closed.
        with open(filepath, 'w') as out_file:
            json.dump(schema, out_file, indent=2)
|
||||
|
||||
|
||||
def _write_example(examples_dir, filename, mime, example):
    """Write one example body: JSON -> ``.json``, text/plain -> ``.txt``.

    Other MIME types are silently skipped, matching the original
    behaviour.
    """
    if mime == 'application/json':
        filepath = path.join(examples_dir, filename + '.json')
        log.info("Writing %s", filepath)
        # Fix: context managers replace leaked handles (the original also
        # shadowed the ``file`` builtin).
        with open(filepath, 'w') as out_file:
            json.dump(example, out_file, indent=2)
    elif mime == 'text/plain':
        filepath = path.join(examples_dir, filename + '.txt')
        log.info("Writing %s", filepath)
        with open(filepath, 'w') as out_file:
            out_file.write(example.strip() + '\n')


def write_examples(swagger, output_dir):
    """Write request/response example bodies for one service version.

    Response examples come from each operation's ``responses`` mapping
    and are named ``<operationId>_resp_<status>``; request examples come
    from each definition's ``example`` mapping and are named
    ``<definition>_req``.  Files land under
    ``<output_dir>/<service>/<version>/examples/``.
    """
    info = swagger['info']
    full_path = path.join(output_dir, info['x-service'],
                          info['version'], 'examples')
    # makedirs creates all intermediate levels in one call.
    if not path.exists(full_path):
        os.makedirs(full_path)

    # Response examples: one file per operation/status-code/MIME.
    for paths in swagger['paths'].values():
        for operation in paths.values():
            for status_code, response in operation['responses'].items():
                for mime, example in response['examples'].items():
                    filename = '_'.join([operation['operationId'],
                                         'resp',
                                         status_code])
                    _write_example(full_path, filename, mime, example)

    # Request examples attached to the schema definitions.
    for ids, schemas in swagger['definitions'].items():
        if 'example' in schemas:
            for mime, example in schemas['example'].items():
                _write_example(full_path, '_'.join([ids, 'req']),
                               mime, example)
|
||||
|
||||
|
||||
def main():
    """Command-line entry point: parse arguments, configure logging,
    then hand off to :func:`main1`."""
    import argparse

    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '-v', '--verbose', action='count', default=0,
        help="Increase verbosity (specify multiple times for more)")
    parser.add_argument(
        '-o', '--output-dir', action='store',
        help="The directory to output the JSON files too.")
    parser.add_argument(
        'filename',
        help="File to convert")

    args = parser.parse_args()

    # -v maps to INFO, -vv (or more) to DEBUG; default is WARNING.
    if args.verbose >= 2:
        log_level = logging.DEBUG
    elif args.verbose == 1:
        log_level = logging.INFO
    else:
        log_level = logging.WARNING

    logging.basicConfig(
        level=log_level,
        format='%(asctime)s %(name)s %(levelname)s %(message)s')

    main1(path.abspath(args.filename), output_dir=args.output_dir)
|
|
@ -1,287 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from collections import defaultdict
|
||||
import json
|
||||
import logging
|
||||
from os import path
|
||||
import re
|
||||
import six
|
||||
try:
|
||||
import urlparse
|
||||
except ImportError:
|
||||
import urllib.parse as urlparse
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Map well-known OpenStack service ports to service names; used to
# attribute a logged request URL to its service in DB.create().
# Fix: the original listed '9292': 'image' twice; the duplicate literal
# key is dropped (same value, so behaviour is unchanged).
DEFAULT_PORTS = {
    '5000': 'identity',
    '35357': 'identity',
    '8774': 'compute',
    '8776': 'volume',
    '8773': 'compute-ec2',
    '9292': 'image',
    '9696': 'networking',
    '8082': 'application-catalog',
    '8004': 'orchestration',
    '8080': 'object',
    '8777': 'telemetry',
}
|
||||
|
||||
PYTHON_LOG_PREFIX_RE = ("^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}) \d+ "
|
||||
"(?P<log_level>[A-Z]+) (?P<logger_name>\S+) "
|
||||
"\[(?P<tags>[^\[\]]+)\] ")
|
||||
|
||||
REQUEST_RE = re.compile(PYTHON_LOG_PREFIX_RE + "Request (?P<test>\([^()]+\)):"
|
||||
" (?P<status_code>\d+)"
|
||||
" (?P<method>[A-Z]+) (?P<url>\S+)")
|
||||
|
||||
REQUEST1_RE = re.compile(PYTHON_LOG_PREFIX_RE + "Request")
|
||||
|
||||
RUBBISH_LINE_RE = re.compile("^ _log_request_full \S+:\d+$")
|
||||
|
||||
REQ_RE = re.compile('(req-[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}'
|
||||
'-[a-z0-9]{4}-[a-z0-9]{12})')
|
||||
|
||||
|
||||
class DB(object):
    """In-memory store of parsed request/response pairs, keyed by the
    request id extracted from the log stream.

    ``requests`` and ``responses`` always share the same keys; a pair is
    created together by :meth:`create`.  The ``*_req_or_resp_*`` helpers
    route to the response once its headers have been recorded, and to
    the request before that.
    """

    def __init__(self):
        self.requests = {}
        self.responses = {}

    def _normalize_headers(self, headers):
        # Header names are matched case-insensitively downstream, so
        # store every key lower-cased.
        return dict((name.lower(), value)
                    for name, value in headers.items())

    def exists(self, req):
        """Return True when a request with this id has been created."""
        return req in self.requests

    def create(self, req, request):
        """Register a new request/response pair from a parsed log line."""
        split_url = urlparse.urlsplit(request['url'])
        # The port number identifies which OpenStack service was called.
        port = split_url.netloc.split(':')[-1]

        self.requests[req] = {
            'service': DEFAULT_PORTS[port],
            # Strip scheme and host; keep only path, query and fragment.
            'url': urlparse.urlunsplit(('', '') + split_url[2:]),
            'method': request['method']}
        self.responses[req] = {
            'status_code': request['status_code']}

    def set_request_headers(self, req, headers):
        self.requests[req]['headers'] = self._normalize_headers(headers)

    def set_response_headers(self, req, headers):
        self.responses[req]['headers'] = self._normalize_headers(headers)

    def get_req_or_resp_length(self, req):
        """Content-length of the response if its headers are known,
        otherwise of the request (0 when absent)."""
        headers = self.responses[req].get('headers')
        if headers is None:
            headers = self.requests[req]['headers']
        return int(headers.get('content-length', 0))

    def set_req_or_resp_length(self, req_id, content_length):
        """Store ``content-length`` on whichever side has (truthy)
        headers, preferring the response; a no-op when neither does."""
        if self.responses[req_id].get('headers'):
            self.responses[req_id]['headers']['content-length'] = \
                content_length
        elif self.requests[req_id].get('headers'):
            self.requests[req_id]['headers']['content-length'] = \
                content_length

    def get_req_or_resp_content_type(self, req):
        """Content-type of the response if its headers are known,
        otherwise of the request; defaults to text/plain."""
        headers = self.responses[req].get('headers')
        if headers is None:
            headers = self.requests[req]['headers']
        return headers.get('content-type', 'text/plain')

    def append_req_or_resp_body(self, req_id, string):
        if self.responses[req_id].get('headers') is not None:
            self.responses[req_id]['body'] += string
        else:
            self.requests[req_id]['body'] += string

    def set_req_or_resp_body(self, req_id, string):
        if self.responses[req_id].get('headers') is not None:
            self.responses[req_id]['body'] = string
        else:
            self.requests[req_id]['body'] = string
|
||||
|
||||
|
||||
def parse_logfile(log_file):
    """Yet another shonky stream parser.

    Walks a tempest-style log stream line by line and accumulates
    request/response pairs into a :class:`DB`, which is returned.
    ``current_req_id`` tracks which request the following header/body
    lines belong to; an empty id means "ignore until the next request
    line".

    SECURITY NOTE(review): this parser calls ``eval()`` on header and
    body text taken from the log file.  That executes arbitrary Python
    from the input; only ever run it on trusted log files.
    """
    content_length = 0
    current_req_id = ''
    db = DB()
    for line in log_file:
        # Drop tempest's "_log_request_full file.py:NN" noise lines.
        if RUBBISH_LINE_RE.match(line):
            continue
        request = REQUEST_RE.match(line)
        if request:
            # A complete one-line request record: register it.
            request_dict = request.groupdict()
            try:
                current_req_id = REQ_RE.match(request_dict['tags']).groups()[0]
            except AttributeError:
                # Swift calls don't have the req tag
                current_req_id = ''
            if current_req_id:
                db.create(current_req_id, request_dict)
        else:
            start_request = REQUEST1_RE.match(line)
            if start_request:
                # A multi-line request record begins; strip the log
                # prefix so the "key: value" parse below can work.
                line = re.sub(PYTHON_LOG_PREFIX_RE, '', line)
                try:
                    current_req_id = REQ_RE.match(
                        start_request.groupdict()['tags']).groups()[0]
                except AttributeError:
                    # Swift calls don't have the req tag
                    current_req_id = ''

                # Skip all boto logs
                if 'boto' == start_request.groupdict()['logger_name']:
                    current_req_id = ''
            try:
                key, value = line.split(':', 1)
            except ValueError:
                # For some wacky reason, when you request JSON,
                # sometimes you get text. Handle this rad behaviour.
                if not current_req_id:
                    continue

                # Continuation line of a multi-line body: append it.
                try:
                    db.append_req_or_resp_body(current_req_id, line)
                except TypeError:
                    log.warning('Failed to find body to add to.')
                continue

            if not current_req_id:
                continue

            if not db.exists(current_req_id):
                log.warning("Can't find request %r" % current_req_id)
                current_req_id = ''
                continue

            key = key.strip()
            value = value.strip()

            # Header dicts are logged as Python literals; see the
            # eval() security note in the docstring.
            if key == 'Request - Headers':
                if current_req_id:
                    db.set_request_headers(current_req_id, eval(value))
            if key == 'Response - Headers':
                if current_req_id:
                    db.set_response_headers(current_req_id, eval(value))
            if key == 'Body':
                if not current_req_id:
                    continue

                content_length = db.get_req_or_resp_length(current_req_id)
                content_type = db.get_req_or_resp_content_type(current_req_id)

                # Trim any messages that are by accident on the end of
                # the line
                if '_log_request_full' in value:
                    value = value.split('_log_request_full')[0]
                if '_check_samples' in value:
                    value = value.split('_check_samples')[0]

                if content_length == 0:
                    body = None
                elif value[:4] == 'None':
                    body = None
                elif 'application/json' in content_type:
                    # Prefer strict JSON; fall back to evaluating the
                    # text as a Python literal, and finally keep the raw
                    # string.
                    try:
                        body = json.loads(value)
                    except ValueError:
                        body = value
                        log.warning("Failed to as JSON %r", value)
                        try:
                            body = eval(value)
                            log.warning("Succeed parsing as Python %r", value)
                        except Exception:
                            body = value

                    # Re-serialize parsed bodies into stable, pretty
                    # JSON so output files are deterministic.
                    if not isinstance(body, six.string_types):
                        body = json.dumps(body, indent=2,
                                          sort_keys=True,
                                          separators=(',', ': '))
                else:
                    body = value

                # Recompute content-length from the stored body text.
                if body is None:
                    content_length = '0'
                else:
                    content_length = str(len(body))
                db.set_req_or_resp_body(current_req_id, body)
                db.set_req_or_resp_length(current_req_id, content_length)

    return db
|
||||
|
||||
|
||||
def main1(log_file, output_dir):
    """Parse one tempest log file and write per-service example files.

    Each service found in the log gets a
    ``<output_dir>/<service>-examples.json`` file containing a list of
    ``(request, response)`` pairs.
    """
    log.info('Reading %s' % log_file)
    # Fix: the original leaked the log-file handle; close it as soon as
    # parsing finishes.
    with open(log_file) as stream:
        db = parse_logfile(stream)
    services = defaultdict(list)
    # Fix: the original reused the name ``calls`` for both the parsed DB
    # and the per-service call lists, shadowing one with the other.
    for req in db.requests:
        call = (db.requests[req], db.responses[req])
        services[call[0]['service']].append(call)
    for service, service_calls in services.items():
        pathname = path.join(output_dir, '%s-examples.json' % (service))
        with open(pathname, 'w') as out_file:
            json.dump(service_calls, out_file, indent=2)
|
||||
|
||||
|
||||
def main():
    """Command-line entry point for the log-to-examples converter."""
    import argparse

    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '-v', '--verbose', action='count', default=0,
        help="Increase verbosity (specify multiple times for more)")
    parser.add_argument(
        '-o', '--output-dir', action='store',
        help="The directory to output the JSON files too.")
    parser.add_argument(
        'filename',
        help="File to convert")

    args = parser.parse_args()

    # Map the -v count onto stdlib logging levels (2+ means DEBUG).
    levels = {0: logging.WARNING, 1: logging.INFO}
    log_level = levels.get(args.verbose, logging.DEBUG)

    logging.basicConfig(
        level=log_level,
        format='%(asctime)s %(name)s %(levelname)s %(message)s')

    main1(path.abspath(args.filename), output_dir=args.output_dir)
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
@ -1,170 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import itertools
|
||||
import json
|
||||
import logging
|
||||
from os import path
|
||||
|
||||
import docutils.core
|
||||
from pecan import conf
|
||||
from pecan import expose
|
||||
from pecan import response
|
||||
from webob.exc import status_map
|
||||
from webob.static import FileIter
|
||||
|
||||
from fairy_slipper import hooks
|
||||
from fairy_slipper.rest import JSONWriter
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class JSONFileController(object):
    """Serve a single on-disk document, probing for a ``.json`` or
    ``.txt`` variant of the configured base path."""

    __hooks__ = [hooks.CORSHook()]

    def __init__(self, filepath):
        # Base path without extension; _default() resolves the suffix.
        self.filepath = filepath

    @expose('json')
    @expose(content_type='text/plain')
    def _default(self):
        # Fix: resolve the extension into a local instead of mutating
        # self.filepath, so a controller instance cannot accumulate
        # suffixes; ``elif`` also stops the original's second probe for
        # "<name>.json.txt" after ".json" matched.
        filepath = self.filepath
        if path.exists(filepath + '.json'):
            filepath = filepath + '.json'
        elif path.exists(filepath + '.txt'):
            filepath = filepath + '.txt'
        if not path.exists(filepath):
            response.status = 404
            return response
        # Stream the file; FileIter takes ownership of the handle.
        response.app_iter = FileIter(open(filepath, 'rb'))
        return response
|
||||
|
||||
|
||||
class DocController(object):
    """Serve the rendered documentation for one service/version.

    ``index`` renders the ReST sources to the JSON structure consumed by
    the JS client; ``_lookup`` dispatches example and schema files to
    :class:`JSONFileController`.
    """

    __hooks__ = [hooks.CORSHook()]

    def __init__(self, service_path, service_info):
        self.service_info = service_info
        base_filepath = path.join(conf.app.api_doc, service_path.rstrip('/'))
        self.api_rst = base_filepath + '.rst'
        self.tags_rst = base_filepath + '-tags.rst'
        self.examples_dir = path.join(base_filepath, 'examples') + path.sep
        self.schema_dir = base_filepath + path.sep
        if not path.exists(self.api_rst):
            logger.warning("Can't find ReST API doc at %s", self.api_rst)
        if not path.exists(self.tags_rst):
            logger.warning("Can't find ReST TAG doc at %s", self.tags_rst)

    @expose('json')
    def index(self):
        """Render the API (and, when present, tag) ReST to JSON."""
        # Fix: read the sources via context managers; the original
        # leaked both file handles.
        if path.exists(self.tags_rst) and path.exists(self.api_rst):
            with open(self.api_rst) as api_file:
                rst = api_file.read()
            with open(self.tags_rst) as tags_file:
                rst = rst + "\n\n" + tags_file.read()
        elif path.exists(self.api_rst):
            with open(self.api_rst) as api_file:
                rst = api_file.read()
        else:
            logger.warning("Can't find ReST documents to render.")
            return {}

        # Fix: the original bound this result to the name ``json``,
        # shadowing the imported json module inside this method.
        document = docutils.core.publish_string(rst, writer=JSONWriter())

        return {'info': self.service_info,
                'paths': document['paths'],
                'tags': document['tags']}

    @expose()
    def _lookup(self, *components):
        """Route /examples/<name> and /<schema> to file controllers."""
        if len(components) != 2 and len(components) != 3:
            return

        if components[0] == 'examples':
            filepath = path.join(self.examples_dir, components[1])
        else:
            # Fix: replaced a leftover debug print() with a debug log.
            logger.debug("Schema lookup for %s", components[0])
            filepath = path.join(self.schema_dir, components[0])
        return JSONFileController(filepath), []
|
||||
|
||||
|
||||
class ServicesController(object):
    """Top-level controller for every documented service/version.

    Loads ``index.json`` from the api_doc directory and builds
    ``url_map``, a nested dict keyed by URL path components whose leaves
    are :class:`DocController` instances.
    """

    def __init__(self):
        filepath = path.join(conf.app.api_doc, 'index.json')
        self.url_map = {}
        # Missing index: serve an empty catalogue rather than crash.
        if not path.exists(filepath):
            logger.error("Can't find documentation at %s", filepath)
            self.services_info = {}
            return
        try:
            self.services_info = json.load(open(filepath))
        except ValueError:
            logger.error("Failed to load %s", filepath)
            raise
        for key, info in self.services_info.items():
            # Add the path into each element, this is to make
            # consumption by the JS client easier.
            info['url'] = key

            # Walk/extend the nested map one path component at a time,
            # remembering the parent so the leaf can be replaced below.
            current_map = self.url_map
            previous_map = None
            for part in [k for k in key.split('/') if k]:
                if part not in current_map:
                    current_map[part] = {}
                previous_map = current_map
                current_map = current_map[part]
            else:
                # NOTE(review): this ``else`` belongs to the ``for`` and
                # always runs (there is no ``break``); it relies on
                # ``part``/``previous_map`` leaking out of the loop to
                # replace the final component's dict with the controller.
                previous_map[part] = DocController(key, info)

    @expose('json')
    def index(self):
        # All known services, as a JSON list.
        return list(self.services_info.values())

    @expose('json')
    def _lookup(self, *components):
        # Walk url_map along the request path; whatever components the
        # iterator has not consumed are passed through as the remainder.
        url_map = self.url_map
        url_walk = itertools.chain(components)
        for component in url_walk:
            if component in url_map:
                url_map = url_map[component]
            else:
                break
        if isinstance(url_map, DocController):
            return url_map, [u for u in url_walk]
|
||||
|
||||
|
||||
class RootController(object):
    """Application root: serves the static index page, the /doc tree and
    the error page."""

    def __init__(self):
        self.doc = ServicesController()

    @expose(content_type='text/html')
    def index(self):
        """Stream the single-page app's index.html."""
        index_path = path.join(conf.app.static_root, 'index.html')
        response.app_iter = FileIter(open(index_path, 'rb'))

    @expose('error.html')
    def error(self, status):
        """Render the error page for the given HTTP status code."""
        try:
            code = int(status)
        except ValueError:
            code = 0
        explanation = getattr(status_map.get(code), 'explanation', '')
        return dict(status=code, message=explanation)
|
|
@ -1,102 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import operator
|
||||
import textwrap
|
||||
|
||||
import docutils.core
|
||||
from pecan import expose
|
||||
from pecan.hooks import HookController
|
||||
import routes
|
||||
|
||||
from fairy_slipper import hooks
|
||||
from fairy_slipper.rest import JSONWriter
|
||||
from paste.deploy import util as paste_util
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class VersionAPIController(object):
    """List known API versions and dispatch each one to a
    :class:`DocSpecController`."""

    def __init__(self, versions):
        # Mapping of version id -> paste router factory path.
        self.versions = versions

    @expose()
    def _lookup(self, id_, *remainder):
        # Fix: logging every lookup at ERROR was leftover debugging;
        # downgraded to debug.
        LOG.debug(id_)
        if id_ in self.versions:
            return DocSpecController(id_, self.versions[id_]), remainder

    @expose(generic=True, template='json')
    def index(self):
        # Fix: materialize the keys so the JSON renderer works on
        # Python 3 (dict_keys is not JSON-serializable).
        return list(self.versions.keys())
|
||||
|
||||
|
||||
class DocSpecController(HookController):
    """Introspect a paste/routes API router and expose its route table
    (with parsed docstrings) as JSON."""

    __hooks__ = [hooks.CORSHook()]

    def __init__(self, version, router):
        # TODO(RS) this had to be hardcoded, to match the murano
        # factory method. Perhaps there is a better way to get it to
        # work using the factory?
        self.version = version
        self.api = paste_util.lookup_object(router)(routes.Mapper())
        super(DocSpecController, self).__init__()

    @expose(generic=True, template='json')
    def index(self):
        """Return one entry per (controller, action) route, sorted by
        the fully-qualified handler path."""
        # Fix: the local was named ``routes``, shadowing the imported
        # ``routes`` module used in __init__.
        route_map = {}
        for route in self.api.map.matchlist:
            if 'controller' not in route.defaults:
                continue

            # De-duplicate by controller identity + action name.
            key = (id(route.defaults['controller']),
                   route.defaults['action'])

            controller = route.defaults['controller'].controller
            action = route.defaults['action']

            if not hasattr(controller, action):
                continue

            # Skip the routes duplicated for format suffixes.
            if route.routepath.endswith('.:(format)'):
                continue

            if key not in route_map:
                route_map[key] = {'routepath': [],
                                  'req': []}

            route_map[key]['routepath'] = '/' + self.version + route.routepath
            route_map[key]['req'] = route.reqs
            route_map[key]['action'] = action
            route_map[key]['conditions'] = route.conditions
            doc = getattr(controller, action).__doc__
            if doc:
                # Parse the handler's docstring into the JSON doc
                # structure.
                parsed = docutils.core.publish_parts(
                    textwrap.dedent(doc),
                    writer=JSONWriter())
                route_map[key].update(parsed)
            route_map[key]['classpath'] = '.'.join(
                [controller.__class__.__module__,
                 controller.__class__.__name__]
            ) + ':' + getattr(controller, action).__name__

        return sorted(route_map.values(),
                      key=operator.itemgetter('classpath'))
|
|
@ -1,26 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from pecan.hooks import PecanHook
|
||||
|
||||
|
||||
class CORSHook(PecanHook):
    """Pecan hook that adds permissive CORS headers to every response."""

    def after(self, state):
        cors_headers = {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Methods': 'GET',
            'Access-Control-Allow-Headers':
                'origin, authorization, accept',
        }
        for header, value in cors_headers.items():
            state.response.headers[header] = value
|
|
@ -1,940 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
import docutils.core
|
||||
from docutils import nodes
|
||||
from docutils.parsers.rst import Directive
|
||||
from docutils.parsers.rst import directives
|
||||
import docutils.utils
|
||||
from docutils import writers
|
||||
import six
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Map example-file extensions to the MIME types used in the swagger
# 'examples' mappings.
MIME_MAP = {
    'json': 'application/json',
    'txt': 'text/plain',
    'xml': 'application/xml',
}

# Human-readable reason phrases for the HTTP status codes that appear in
# the converted API documents.
STATUS_CODE_MAP = {
    '200': 'Success',
    '201': 'Created',
    '202': 'Accepted',
    '203': 'Non-Authoritative Information',
    '204': 'No Content',
    '205': 'Reset Content',
    '206': 'Partial Content',
    '300': 'Multiple Choices',
    '301': 'Moved Permanently',
    '302': 'Found',
    '303': 'See Other',
    '304': 'Not Modified',
    '400': 'Bad Request',
    '401': 'Unauthorized',
    '403': 'Forbidden',
    '404': 'Not Found',
    '405': 'Method Not Allowed',
    '409': 'Conflict',
    '410': 'Gone',
    '413': 'Request Entity Too Large',
    '415': 'Unsupported Media Type',
    '503': 'Service Unavailable',
}
|
||||
|
||||
|
||||
def search_node_parents(node, node_name):
    """Return ``node`` if it, or any of its ancestors, has tagname
    ``node_name``; otherwise return None.

    Note the original node (not the matching ancestor) is returned —
    the function answers "is this node inside a ``node_name``?".
    """
    current = node
    while current.parent:
        if current.tagname == node_name:
            return node
        current = current.parent
    # ``current`` is now the root; give it the same check.
    if current.tagname == node_name:
        return node
|
||||
|
||||
|
||||
class JSONTranslator(nodes.GenericNodeVisitor):
|
||||
    def __init__(self, document):
        """Set up the visitor state for one document traversal."""
        nodes.NodeVisitor.__init__(self, document)
        # Final JSON payload: top-level 'tags' list and 'paths' mapping.
        self.output = {
            'tags': [],
            'paths': {}
        }
        # Stack of nested output containers; the top element is where
        # the current node's content is written.
        self.node_stack = []
        self.node_stack.append(self.output)
        self.current_node_name = None
        # One entry ('*') per currently-open bullet list; its depth
        # drives list nesting.
        self.bullet_stack = []
        # Buffered inline text for the table cell being built.
        self.table_stack = []
        # Accumulated Markdown-flavoured text for the current section.
        self.text = ''
        # Table bookkeeping: current column count and rows seen so far
        # (first_row > 0 means "inside a table").
        self.col_num = 0
        self.first_row = 0
        # NOTE(review): the following four fields are initialised here
        # but their use is outside this view — confirm before relying
        # on them.
        self.hyperlink_name = ''
        self.refuri = ''
        self.listitem = False
        self.lit_block = False
        self.list_indent = 0
        self.text_res_desc = ''
|
||||
|
||||
    def search_stack_for(self, tag_name):
        """Return the first dict on the node stack (outermost first)
        that already has ``tag_name`` as a key, or None."""
        for node in self.node_stack:
            # Skip any list elements, this is a hack, but it' works
            # for now.
            if isinstance(node, (list, ) + six.string_types):
                continue
            if tag_name in node.keys():
                return node
|
||||
|
||||
    def visit_document(self, node):
        # Disable both the document visit and depart
        pass

    def depart_document(self, node):
        pass

    def default_visit(self, node):
        """Default node visit method."""
        # Open a nested container keyed by the node's class name and
        # make it the current write target (only for non-empty nodes).
        self.current_node_name = node.__class__.__name__
        if hasattr(node, 'children') and node.children:
            new_node = {}
            self.node_stack[-1][self.current_node_name] = new_node
            self.node_stack.append(new_node)

    def default_departure(self, node):
        """Default node depart method."""
        # Pop only when the matching visit pushed a container.
        if hasattr(node, 'children') and node.children:
            self.node_stack.pop()

    def visit_system_message(self, node):
        # Swallow docutils parse warnings/errors entirely.
        pass

    def depart_system_message(self, node):
        pass
|
||||
|
||||
def visit_Text(self, node):
|
||||
if self.first_row is 0:
|
||||
if self.lit_block and len(self.bullet_stack) > 0:
|
||||
litblock = node.astext().split('\n')
|
||||
litblock = '\n '.join(litblock)
|
||||
self.text += litblock
|
||||
else:
|
||||
self.text += node.astext()
|
||||
|
||||
def depart_Text(self, node):
|
||||
pass
|
||||
|
||||
def visit_emphasis(self, node):
|
||||
if self.first_row > 0:
|
||||
inlinetxt = self.table_stack.pop()
|
||||
para = inlinetxt.partition(node.astext())
|
||||
new_para = ''
|
||||
new_para += para[0] + '_' + para[1] + '_' + para[2]
|
||||
self.table_stack.append(new_para)
|
||||
else:
|
||||
self.text += '_'
|
||||
|
||||
def depart_emphasis(self, node):
|
||||
if self.first_row is 0:
|
||||
self.text += '_'
|
||||
|
||||
def visit_literal(self, node):
|
||||
if self.first_row > 0:
|
||||
inlinetxt = self.table_stack.pop()
|
||||
para = inlinetxt.partition(node.astext())
|
||||
new_para = ''
|
||||
new_para += para[0] + '`' + para[1] + '`' + para[2]
|
||||
self.table_stack.append(new_para)
|
||||
else:
|
||||
self.text += '`'
|
||||
|
||||
def depart_literal(self, node):
|
||||
if self.first_row is 0:
|
||||
self.text += '`'
|
||||
|
||||
def visit_strong(self, node):
|
||||
if self.first_row > 0:
|
||||
inlinetxt = self.table_stack.pop()
|
||||
para = inlinetxt.partition(node.astext())
|
||||
new_para = ''
|
||||
new_para += para[0] + '**' + para[1] + '**' + para[2]
|
||||
self.table_stack.append(new_para)
|
||||
else:
|
||||
self.text += '**'
|
||||
|
||||
def depart_strong(self, node):
|
||||
if self.first_row is 0:
|
||||
self.text += '**'
|
||||
|
||||
    def visit_literal_block(self, node):
        # Inside a bullet list a literal block is indented under the
        # item; at top level it is opened as a ``` fence.
        if len(self.bullet_stack) > 0:
            self.text += '\n '
        else:
            self.text += '```\n'
        self.lit_block = True

    def depart_literal_block(self, node):
        if len(self.bullet_stack) > 0:
            self.text += '\n'
        else:
            self.text += '\n```\n'
        self.lit_block = False
|
||||
|
||||
def visit_bullet_list(self, node):
|
||||
if self.first_row > 0:
|
||||
self.text += """<ul>"""
|
||||
else:
|
||||
self.bullet_stack.append('*')
|
||||
|
||||
def depart_bullet_list(self, node):
|
||||
if self.first_row > 0:
|
||||
self.text += """</ul>"""
|
||||
else:
|
||||
self.bullet_stack.pop()
|
||||
self.list_indent = len(self.bullet_stack) - 1
|
||||
if len(self.bullet_stack) is 0:
|
||||
self.text += '\n'
|
||||
|
||||
def visit_list_item(self, node):
|
||||
if self.first_row > 0:
|
||||
self.text += """<li>"""
|
||||
else:
|
||||
self.list_indent = len(self.bullet_stack) - 1
|
||||
item = '\n%s%s ' % (' ' * self.list_indent,
|
||||
self.bullet_stack[-1])
|
||||
self.text += item
|
||||
self.listitem = True
|
||||
|
||||
def depart_list_item(self, node):
|
||||
if self.first_row > 0:
|
||||
self.text += """</li>"""
|
||||
else:
|
||||
self.listitem = False
|
||||
self.list_indent = 0
|
||||
|
||||
    def visit_title(self, node):
        # Titles collect into a list container under the current node.
        # NOTE(review): the push happens only when the key is new, but
        # depart_title pops unconditionally — a repeated title at the
        # same level looks like it would unbalance the stack; confirm
        # against the full class before relying on this.
        self.current_node_name = node.__class__.__name__
        if self.current_node_name not in self.node_stack[-1]:
            new_node = []
            self.node_stack[-1][self.current_node_name] = new_node
            self.node_stack.append(new_node)

    def depart_title(self, node):
        self.node_stack.pop()
|
||||
|
||||
def visit_paragraph(self, node):
|
||||
if self.first_row > 0:
|
||||
self.table_stack.append(node.astext())
|
||||
else:
|
||||
# listitem text
|
||||
if self.listitem is True:
|
||||
pass
|
||||
else:
|
||||
# another para in listitem
|
||||
if len(self.bullet_stack) > 0:
|
||||
if self.lit_block:
|
||||
self.text += '\n' + ' '
|
||||
else:
|
||||
self.text += '\n' + ' ' * self.list_indent + ' '
|
||||
|
||||
def depart_paragraph(self, node):
|
||||
if self.first_row is 0:
|
||||
if self.listitem:
|
||||
self.text += '\n'
|
||||
self.listitem = False
|
||||
else:
|
||||
if len(self.bullet_stack) > 0:
|
||||
self.text += "\n"
|
||||
else:
|
||||
# default paragraph
|
||||
self.text += "\n\n"
|
||||
else:
|
||||
if self.first_row > 0:
|
||||
para = self.table_stack.pop()
|
||||
para = para.strip('\n')
|
||||
plist = para.split('\n')
|
||||
|
||||
# multi-line text in single column
|
||||
if len(plist) > 0:
|
||||
self.text += """<br>""".join(plist)
|
||||
else:
|
||||
self.text += para
|
||||
|
||||
    def visit_line_block(self, node):
        # Line blocks group their lines into a list container; reuse an
        # existing list when one was already opened for this node name.
        if isinstance(self.node_stack[-1], list):
            return

        self.current_node_name = node.__class__.__name__
        if self.current_node_name not in self.node_stack[-1]:
            new_node = []
            self.node_stack[-1][self.current_node_name] = new_node
            self.node_stack.append(new_node)
        else:
            self.node_stack.append(self.node_stack[-1][self.current_node_name])

    def depart_line_block(self, node):
        # Pop only when the visit pushed a list (it returns early when
        # the top of the stack was already a list).
        if isinstance(self.node_stack[-1], list):
            self.node_stack.pop()
|
||||
|
||||
def visit_table(self, node):
|
||||
self.col_num = 0
|
||||
|
||||
def depart_table(self, node):
|
||||
self.text += "\n"
|
||||
|
||||
def visit_tbody(self, node):
|
||||
pass
|
||||
|
||||
def depart_tbody(self, node):
|
||||
self.text += "\n"
|
||||
self.first_row = 0
|
||||
self.col_num = 0
|
||||
|
||||
def visit_thead(self, node):
|
||||
pass
|
||||
|
||||
def depart_thead(self, node):
|
||||
pass
|
||||
|
||||
def visit_tgroup(self, node):
|
||||
pass
|
||||
|
||||
def depart_tgroup(self, node):
|
||||
pass
|
||||
|
||||
def visit_colspec(self, node):
|
||||
pass
|
||||
|
||||
def depart_colspec(self, node):
|
||||
pass
|
||||
|
||||
def visit_row(self, node):
|
||||
if self.first_row is 1 and self.col_num > 0:
|
||||
row_separator = [' --- '] * self.col_num
|
||||
self.text += "|"
|
||||
sep_row = "|".join(row_separator)
|
||||
self.text += sep_row
|
||||
self.text += "|"
|
||||
self.text += "\n"
|
||||
|
||||
self.text += "|"
|
||||
self.first_row += 1
|
||||
|
||||
def depart_row(self, node):
|
||||
self.text += "\n"
|
||||
|
||||
def visit_entry(self, node):
|
||||
self.text += " "
|
||||
|
||||
def depart_entry(self, node):
|
||||
self.text += " |"
|
||||
self.col_num += 1
|
||||
|
||||
def visit_definition(self, node):
|
||||
pass
|
||||
|
||||
def depart_definition(self, node):
|
||||
pass
|
||||
|
||||
def visit_definition_list(self, node):
|
||||
pass
|
||||
|
||||
def depart_definition_list(self, node):
|
||||
pass
|
||||
|
||||
def visit_definition_list_item(self, node):
|
||||
pass
|
||||
|
||||
def depart_definition_list_item(self, node):
|
||||
pass
|
||||
|
||||
def visit_term(self, node):
|
||||
self.text += " "
|
||||
if self.first_row is 0:
|
||||
self.text += node.astext()
|
||||
else:
|
||||
self.table_stack.append(node.astext())
|
||||
|
||||
def depart_term(self, node):
|
||||
if self.first_row > 0:
|
||||
self.text += self.table_stack.pop()
|
||||
self.text += """<br>"""
|
||||
|
||||
def visit_reference(self, node):
|
||||
self.hyperlink_name = node.attributes['name']
|
||||
self.refuri = node.attributes['refuri']
|
||||
self.text += '['
|
||||
|
||||
def depart_reference(self, node):
|
||||
if self.hyperlink_name:
|
||||
self.text += ']'
|
||||
self.text += '(' + self.refuri + ')'
|
||||
else:
|
||||
self.text += '[' + self.refuri + ']'
|
||||
|
||||
self.hyperlink_name = ''
|
||||
self.refuri = ''
|
||||
|
||||
def visit_resource(self, node):
|
||||
self.text = ''
|
||||
if 'paths' not in self.node_stack[-1]:
|
||||
self.node_stack[-1]['paths'] = {}
|
||||
self.node_stack.append(self.node_stack[-1]['paths'])
|
||||
|
||||
def depart_resource(self, node):
|
||||
self.node_stack[-1]['description'] = self.text
|
||||
# XXX This is a massive hack, this is here because the visit
|
||||
# resource url functions don't pop the stack.
|
||||
self.node_stack.pop()
|
||||
self.node_stack.pop()
|
||||
|
||||
def visit_resource_url(self, node):
|
||||
url_path = node.astext()
|
||||
node.clear()
|
||||
if url_path not in self.node_stack[-1]:
|
||||
self.node_stack[-1][url_path] = []
|
||||
new_node = {'responses': {},
|
||||
'parameters': [],
|
||||
'description': '',
|
||||
'produces': [],
|
||||
'consumes': [],
|
||||
'tags': []}
|
||||
self.node_stack[-1][url_path].append(new_node)
|
||||
self.node_stack.append(new_node)
|
||||
|
||||
def depart_resource_url(self, node):
|
||||
pass
|
||||
|
||||
def visit_resource_summary(self, node):
|
||||
summary = node.astext()
|
||||
self.node_stack[-1]['summary'] = summary
|
||||
node.clear()
|
||||
|
||||
def depart_resource_summary(self, node):
|
||||
pass
|
||||
|
||||
def visit_resource_title(self, node):
|
||||
title = node.astext()
|
||||
# Should probably be x-title
|
||||
self.node_stack[-1]['title'] = title
|
||||
node.clear()
|
||||
|
||||
def depart_resource_title(self, node):
|
||||
pass
|
||||
|
||||
def visit_resource_method(self, node):
|
||||
method = node.astext()
|
||||
self.node_stack[-1]['method'] = method
|
||||
node.clear()
|
||||
|
||||
def depart_resource_method(self, node):
|
||||
pass
|
||||
|
||||
def visit_field_list(self, node):
|
||||
pass
|
||||
|
||||
def depart_field_list(self, node):
|
||||
pass
|
||||
|
||||
def visit_field(self, node):
|
||||
name = node.attributes['names'][0]
|
||||
resource = self.node_stack[-1]
|
||||
new_response = {'description': ''}
|
||||
# TODO(arrsim) this name matching ignores all the other
|
||||
# possible names that the fields could have.
|
||||
if name == 'statuscode':
|
||||
responses = resource['responses']
|
||||
status_code = node[0].astext()
|
||||
description = node[1].astext()
|
||||
if status_code not in responses:
|
||||
responses[status_code] = new_response
|
||||
if not description and status_code in STATUS_CODE_MAP:
|
||||
description = STATUS_CODE_MAP[status_code]
|
||||
responses[status_code]['description'] = description
|
||||
node.clear()
|
||||
elif name == 'responseexample':
|
||||
responses = resource['responses']
|
||||
status_code = node[0].astext()
|
||||
filepath = node[1].astext()
|
||||
if status_code not in responses:
|
||||
responses[status_code] = new_response
|
||||
ext = filepath.rsplit('.', 1)[1]
|
||||
mimetype = MIME_MAP[ext]
|
||||
if 'examples' not in responses[status_code]:
|
||||
responses[status_code]['examples'] = {}
|
||||
responses[status_code]['examples'][mimetype] = {'$ref': filepath}
|
||||
node.clear()
|
||||
elif name == 'requestexample':
|
||||
status_code = node[0].astext()
|
||||
filepath = node[1].astext()
|
||||
ext = filepath.rsplit('.', 1)[1]
|
||||
mimetype = MIME_MAP[ext]
|
||||
if 'examples' not in resource:
|
||||
resource['examples'] = {}
|
||||
resource['examples'][mimetype] = {'$ref': filepath}
|
||||
node.clear()
|
||||
elif name == 'requestschema':
|
||||
filepath = node[1].astext()
|
||||
resource['parameters'].append(
|
||||
{'name': 'body',
|
||||
'in': 'body',
|
||||
'required': True,
|
||||
'schema': {'$ref': filepath}})
|
||||
node.clear()
|
||||
elif name == 'responseschema':
|
||||
responses = resource['responses']
|
||||
status_code = node[0].astext()
|
||||
filepath = node[1].astext()
|
||||
if status_code not in responses:
|
||||
responses[status_code] = new_response
|
||||
if 'schema' not in responses[status_code]:
|
||||
responses[status_code]['schema'] = {}
|
||||
responses[status_code]['schema'] = {'$ref': filepath}
|
||||
node.clear()
|
||||
elif name == 'parameter':
|
||||
param_name = node[0].astext()
|
||||
description = node[1].astext()
|
||||
resource['parameters'].append(
|
||||
{'name': param_name,
|
||||
'description': description,
|
||||
'in': 'path',
|
||||
'type': 'string',
|
||||
'required': True})
|
||||
node.clear()
|
||||
elif name == 'query':
|
||||
self.text_res_desc = self.text
|
||||
param_name = node[0].astext()
|
||||
self.text = ''
|
||||
description = ''
|
||||
resource['parameters'].append(
|
||||
{'name': param_name,
|
||||
'description': description,
|
||||
'in': 'query',
|
||||
'type': 'string',
|
||||
'required': False})
|
||||
elif name == 'reqheader':
|
||||
self.text_res_desc = self.text
|
||||
param_name = node[0].astext()
|
||||
self.text = ''
|
||||
description = ''
|
||||
resource['parameters'].append(
|
||||
{'name': param_name,
|
||||
'description': description,
|
||||
'in': 'header',
|
||||
'type': 'string',
|
||||
'required': False})
|
||||
elif name == 'tag':
|
||||
tag = node[1].astext()
|
||||
resource['tags'].append(tag)
|
||||
node.clear()
|
||||
elif name == 'accepts':
|
||||
mimetype = node[1].astext()
|
||||
resource['consumes'].append(mimetype)
|
||||
node.clear()
|
||||
elif name == 'produces':
|
||||
mimetype = node[1].astext()
|
||||
resource['produces'].append(mimetype)
|
||||
node.clear()
|
||||
else:
|
||||
node.clear()
|
||||
|
||||
def depart_field(self, node):
|
||||
name = node.attributes['names'][0]
|
||||
resource = self.node_stack[-1]
|
||||
if name == 'query' or name == 'reqheader':
|
||||
param_name = node[0].astext()
|
||||
if self.text.startswith(param_name):
|
||||
resource['parameters'][-1]['description'] \
|
||||
= self.text[len(param_name):]
|
||||
else:
|
||||
resource['parameters'][-1]['description'] = self.text
|
||||
self.text = self.text_res_desc
|
||||
|
||||
def visit_field_name(self, node):
|
||||
self.node_stack[-1]['name'] = node.astext()
|
||||
|
||||
def depart_field_name(self, node):
|
||||
pass
|
||||
|
||||
def visit_field_body(self, node):
|
||||
self.node_stack[-1]['type'] = node.astext()
|
||||
|
||||
def depart_field_body(self, node):
|
||||
pass
|
||||
|
||||
def visit_field_type(self, node):
|
||||
self.node_stack[-1]['type'] = node.astext()
|
||||
|
||||
def depart_field_type(self, node):
|
||||
pass
|
||||
|
||||
def visit_swagger_tag(self, node):
|
||||
self.text = ''
|
||||
self.node_stack.append(self.node_stack[-1]['tags'])
|
||||
new_node = {'name': '',
|
||||
'description': ''}
|
||||
self.node_stack[-1].append(new_node)
|
||||
self.node_stack.append(new_node)
|
||||
|
||||
def depart_swagger_tag(self, node):
|
||||
self.node_stack[-1]['description'] = self.text
|
||||
self.node_stack.pop()
|
||||
self.node_stack.pop()
|
||||
|
||||
def visit_swagger_tag_name(self, node):
|
||||
name = node.astext()
|
||||
node.clear()
|
||||
self.node_stack[-1]['name'] = name
|
||||
|
||||
def depart_swagger_tag_name(self, node):
|
||||
pass
|
||||
|
||||
def visit_swagger_tag_summary(self, node):
|
||||
summary = node.astext()
|
||||
node.clear()
|
||||
self.node_stack[-1]['summary'] = summary
|
||||
|
||||
def depart_swagger_tag_summary(self, node):
|
||||
pass
|
||||
|
||||
|
||||
class JSONWriter(writers.Writer):
|
||||
|
||||
supported = ('json',)
|
||||
"""Formats this writer supports."""
|
||||
|
||||
settings_spec = (
|
||||
'"Docutils JSON" Writer Options',
|
||||
None,
|
||||
[])
|
||||
|
||||
config_section = 'docutils_json writer'
|
||||
config_section_dependencies = ('writers',)
|
||||
|
||||
output = None
|
||||
|
||||
def __init__(self):
|
||||
writers.Writer.__init__(self)
|
||||
self.translator_class = JSONTranslator
|
||||
|
||||
def translate(self):
|
||||
self.visitor = visitor = self.translator_class(self.document)
|
||||
self.document.walkabout(visitor)
|
||||
self.output = visitor.output
|
||||
|
||||
|
||||
class field_type(nodes.Part, nodes.TextElement):
|
||||
pass
|
||||
|
||||
|
||||
class resource(nodes.Inline, nodes.TextElement):
|
||||
pass
|
||||
|
||||
|
||||
class resource_url(nodes.Admonition, nodes.TextElement):
|
||||
pass
|
||||
|
||||
|
||||
class resource_title(nodes.Admonition, nodes.TextElement):
|
||||
pass
|
||||
|
||||
|
||||
class resource_summary(nodes.Admonition, nodes.TextElement):
|
||||
pass
|
||||
|
||||
|
||||
class resource_method(nodes.Admonition, nodes.TextElement):
|
||||
pass
|
||||
|
||||
|
||||
class Field(object):
|
||||
def __init__(self, name, names=(), label=None,
|
||||
has_arg=True, rolename=None):
|
||||
self.name = name
|
||||
self.names = names
|
||||
self.label = label
|
||||
self.has_arg = has_arg
|
||||
self.rolename = rolename
|
||||
|
||||
@classmethod
|
||||
def transform(cls, node):
|
||||
node.attributes['names'].append(node[0].astext())
|
||||
|
||||
|
||||
class TypedField(Field):
|
||||
def __init__(self, name, names=(), label=None,
|
||||
has_arg=True, rolename=None,
|
||||
typerolename='', typenames=()):
|
||||
super(TypedField, self).__init__(
|
||||
name=name,
|
||||
names=names,
|
||||
label=label,
|
||||
has_arg=has_arg,
|
||||
rolename=rolename)
|
||||
self.typerolename = typerolename
|
||||
self.typenames = typenames
|
||||
|
||||
@classmethod
|
||||
def transform(cls, node):
|
||||
split = node[0].rawsource.split(None, 2)
|
||||
type = None
|
||||
if len(split) == 3:
|
||||
name, type, value = split
|
||||
elif len(split) == 2:
|
||||
name, value = split
|
||||
else:
|
||||
raise Exception('Too Few arguments.')
|
||||
node.attributes['names'].append(name)
|
||||
if type:
|
||||
node.insert(1, field_type(type))
|
||||
node[0].replace_self(nodes.field_name(value, value))
|
||||
|
||||
|
||||
class GroupedField(Field):
|
||||
|
||||
@classmethod
|
||||
def transform(cls, node):
|
||||
name, value = node[0].rawsource.split(None, 1)
|
||||
node.attributes['names'].append(name)
|
||||
node[0].replace_self(nodes.field_name(value, value))
|
||||
|
||||
|
||||
class Resource(Directive):
|
||||
|
||||
method = None
|
||||
|
||||
required_arguments = 1
|
||||
optional_arguments = 0
|
||||
has_content = True
|
||||
final_argument_whitespace = True
|
||||
|
||||
doc_field_types = [
|
||||
TypedField('parameter', label='Parameters',
|
||||
names=('param', 'parameter', 'arg', 'argument'),
|
||||
typerolename='obj', typenames=('paramtype', 'type')),
|
||||
TypedField('jsonparameter', label='JSON Parameters',
|
||||
names=('jsonparameter', 'jsonparam', 'json'),
|
||||
typerolename='obj',
|
||||
typenames=('jsonparamtype', 'jsontype')),
|
||||
TypedField('requestjsonobject', label='Request JSON Object',
|
||||
names=('reqjsonobj', 'reqjson', '<jsonobj', '<json'),
|
||||
typerolename='obj', typenames=('reqjsonobj', '<jsonobj')),
|
||||
TypedField('requestjsonarray', label='Request JSON Array of Objects',
|
||||
names=('reqjsonarr', '<jsonarr'),
|
||||
typerolename='obj',
|
||||
typenames=('reqjsonarrtype', '<jsonarrtype')),
|
||||
TypedField('responsejsonobject', label='Response JSON Object',
|
||||
names=('resjsonobj', 'resjson', '>jsonobj', '>json'),
|
||||
typerolename='obj', typenames=('resjsonobj', '>jsonobj')),
|
||||
TypedField('responsejsonarray', label='Response JSON Array of Objects',
|
||||
names=('resjsonarr', '>jsonarr'),
|
||||
typerolename='obj',
|
||||
typenames=('resjsonarrtype', '>jsonarrtype')),
|
||||
TypedField('queryparameter', label='Query Parameters',
|
||||
names=('queryparameter', 'queryparam', 'qparam', 'query'),
|
||||
typerolename='obj',
|
||||
typenames=('queryparamtype', 'querytype', 'qtype')),
|
||||
GroupedField('formparameter', label='Form Parameters',
|
||||
names=('formparameter', 'formparam', 'fparam', 'form')),
|
||||
GroupedField('requestheader', label='Request Headers',
|
||||
rolename='header',
|
||||
names=('<header', 'reqheader', 'requestheader')),
|
||||
GroupedField('responseheader', label='Response Headers',
|
||||
rolename='header',
|
||||
names=('>header', 'resheader', 'responseheader')),
|
||||
GroupedField('statuscode', label='Status Codes',
|
||||
rolename='statuscode',
|
||||
names=('statuscode', 'status', 'code')),
|
||||
GroupedField('responseschema', label='Response Schema',
|
||||
rolename='responseschema',
|
||||
names=('reponse-schema', 'responseschema')),
|
||||
|
||||
# Swagger Extensions
|
||||
GroupedField('responseexample', label='Response Example',
|
||||
rolename='responseexample',
|
||||
names=('swagger-response', 'responseexample')),
|
||||
Field('requestexample', label='Request Example',
|
||||
rolename='requestexample',
|
||||
names=('swagger-request', 'requestexample')),
|
||||
Field('requestschema', label='Request Schema',
|
||||
rolename='requestschema',
|
||||
names=('swagger-schema', 'requestschema')),
|
||||
Field('tag',
|
||||
label='Swagger Tag',
|
||||
rolename='tag',
|
||||
names=('swagger-tag', 'tag')),
|
||||
Field('accepts',
|
||||
label='Swagger Consumes',
|
||||
rolename='accepts',
|
||||
names=('swagger-accepts', 'accepts')),
|
||||
Field('produces',
|
||||
label='Swagger Consumes',
|
||||
rolename='produces',
|
||||
names=('swagger-produces', 'produces'))
|
||||
]
|
||||
|
||||
option_spec = {
|
||||
'title': lambda x: x,
|
||||
'synopsis': lambda x: x,
|
||||
}
|
||||
|
||||
def transform_fields(self):
|
||||
return {name: f
|
||||
for f in self.doc_field_types
|
||||
for name in f.names}
|
||||
|
||||
def run(self):
|
||||
node = resource()
|
||||
self.state.nested_parse(self.content, self.content_offset, node)
|
||||
fields = self.transform_fields()
|
||||
|
||||
# This is the first line of the definition.
|
||||
url = self.arguments[0]
|
||||
node.insert(0, resource_url(url, url))
|
||||
|
||||
if not node.children:
|
||||
return [node]
|
||||
|
||||
if node[0].tagname == 'system_message':
|
||||
logger.error(node[0].astext())
|
||||
node.remove(node[0])
|
||||
|
||||
# Method
|
||||
node.insert(1, resource_method(self.method, self.method))
|
||||
|
||||
# Summary
|
||||
summary = self.options.get('synopsis', '')
|
||||
node.insert(1, resource_summary(summary, summary))
|
||||
title = self.options.get('title', '')
|
||||
node.insert(1, resource_title(title, title))
|
||||
|
||||
# Generate field lists
|
||||
for child in node:
|
||||
if isinstance(child, nodes.field_list):
|
||||
for field in child:
|
||||
name = field[0].rawsource.split(None, 1)[0]
|
||||
fields[name].transform(field)
|
||||
return [node]
|
||||
|
||||
|
||||
class HTTPGet(Resource):
|
||||
|
||||
method = 'get'
|
||||
|
||||
|
||||
class HTTPPost(Resource):
|
||||
|
||||
method = 'post'
|
||||
|
||||
|
||||
class HTTPPut(Resource):
|
||||
|
||||
method = 'put'
|
||||
|
||||
|
||||
class HTTPPatch(Resource):
|
||||
|
||||
method = 'patch'
|
||||
|
||||
|
||||
class HTTPOptions(Resource):
|
||||
|
||||
method = 'options'
|
||||
|
||||
|
||||
class HTTPHead(Resource):
|
||||
|
||||
method = 'head'
|
||||
|
||||
|
||||
class HTTPDelete(Resource):
|
||||
|
||||
method = 'delete'
|
||||
|
||||
|
||||
class HTTPCopy(Resource):
|
||||
|
||||
method = 'copy'
|
||||
|
||||
|
||||
directives.register_directive('http:get', HTTPGet)
|
||||
directives.register_directive('http:post', HTTPPost)
|
||||
directives.register_directive('http:put', HTTPPut)
|
||||
directives.register_directive('http:patch', HTTPPatch)
|
||||
directives.register_directive('http:options', HTTPOptions)
|
||||
directives.register_directive('http:head', HTTPHead)
|
||||
directives.register_directive('http:delete', HTTPDelete)
|
||||
directives.register_directive('http:copy', HTTPCopy)
|
||||
|
||||
|
||||
class swagger_tag(nodes.Inline, nodes.TextElement):
|
||||
pass
|
||||
|
||||
|
||||
class swagger_tag_name(nodes.Inline, nodes.TextElement):
|
||||
pass
|
||||
|
||||
|
||||
class swagger_tag_summary(nodes.Inline, nodes.TextElement):
|
||||
pass
|
||||
|
||||
|
||||
class SwaggerTag(Directive):
|
||||
|
||||
method = None
|
||||
|
||||
required_arguments = 0
|
||||
optional_arguments = 0
|
||||
has_content = True
|
||||
final_argument_whitespace = True
|
||||
|
||||
option_spec = {
|
||||
'synopsis': lambda x: x,
|
||||
}
|
||||
|
||||
def run(self):
|
||||
node = swagger_tag()
|
||||
self.state.nested_parse(self.content, self.content_offset, node)
|
||||
|
||||
# This is the first line of the definition.
|
||||
name = node[0].astext()
|
||||
node[0].replace_self(swagger_tag_name(name, name))
|
||||
|
||||
# Summary
|
||||
summary = self.options.get('synopsis', '')
|
||||
node.insert(1, swagger_tag_summary(summary, summary))
|
||||
|
||||
return [node]
|
||||
|
||||
|
||||
directives.register_directive('swagger:tag', SwaggerTag)
|
||||
|
||||
|
||||
class error_writer(object):
|
||||
|
||||
def write(self, line):
|
||||
logger.warning(line.strip())
|
||||
|
||||
|
||||
def publish_string(string):
|
||||
settings_overrides = {'warning_stream': error_writer()}
|
||||
return docutils.core.publish_string(
|
||||
string, writer=JSONWriter(),
|
||||
settings_overrides=settings_overrides)
|
|
@ -1,12 +0,0 @@
|
|||
<%inherit file="layout.html" />
|
||||
|
||||
## provide definitions for blocks we want to redefine
|
||||
<%def name="title()">
|
||||
Server Error ${status}
|
||||
</%def>
|
||||
|
||||
## now define the body of the template
|
||||
<header>
|
||||
<h1>Server Error ${status}</h1>
|
||||
</header>
|
||||
<p>${message}</p>
|
|
@ -1,28 +0,0 @@
|
|||
<%inherit file="layout.html" />
|
||||
<%def name="title()">
|
||||
Fairy-Slipper
|
||||
</%def>
|
||||
|
||||
<%def name="style()">
|
||||
<link rel="stylesheet" href="components/bootstrap/dist/css/bootstrap.min.css">
|
||||
<link rel="stylesheet" href="components/angular-swagger-ui/dist/css/swagger-ui.min.css">
|
||||
</%def>
|
||||
|
||||
<%def name="javascript()">
|
||||
<script src="components/angular/angular.js"></script>
|
||||
<script src="components/angular-route/angular-route.js"></script>
|
||||
<script src="components/angular-swagger-ui/dist/scripts/swagger-ui.js"></script>
|
||||
<script src="app.js"></script>
|
||||
<script src="browser/browser.js"></script>
|
||||
</%def>
|
||||
|
||||
<ul class="menu">
|
||||
<li><a href="#/view1">view1</a></li>
|
||||
<li><a href="#/view2">view2</a></li>
|
||||
</ul>
|
||||
|
||||
<!--[if lt IE 7]>
|
||||
<p class="browsehappy">You are using an <strong>outdated</strong> browser. Please <a href="http://browsehappy.com/">upgrade your browser</a> to improve your experience.</p>
|
||||
<![endif]-->
|
||||
|
||||
<div ng-view></div>
|
|
@ -1,30 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<!--[if lt IE 7]> <html lang="en" ng-app="fairySlipper" class="no-js lt-ie9 lt-ie8 lt-ie7"> <![endif]-->
|
||||
<!--[if IE 7]> <html lang="en" ng-app="fairySlipper" class="no-js lt-ie9 lt-ie8"> <![endif]-->
|
||||
<!--[if IE 8]> <html lang="en" ng-app="fairySlipper" class="no-js lt-ie9"> <![endif]-->
|
||||
<!--[if gt IE 8]><!--> <html lang="en" ng-app="fairySlipper" class="no-js"> <!--<![endif]-->
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<title>${self.title()}</title>
|
||||
<meta name="description" content="">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
${self.style()}
|
||||
${self.javascript()}
|
||||
</head>
|
||||
<body>
|
||||
${self.body()}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
<%def name="title()">
|
||||
Default Title
|
||||
</%def>
|
||||
|
||||
<%def name="style()">
|
||||
|
||||
</%def>
|
||||
|
||||
<%def name="javascript()">
|
||||
|
||||
</%def>
|
|
@ -1,59 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
from unittest import TestCase
|
||||
|
||||
from pecan import set_config
|
||||
from pecan.testing import load_test_app
|
||||
|
||||
__all__ = ['FunctionalTest']
|
||||
|
||||
|
||||
class FunctionalTest(TestCase):
|
||||
"""Used for functional tests
|
||||
|
||||
Provides a literal application and for testing its integration
|
||||
with the framework.
|
||||
"""
|
||||
CONFIG = {
|
||||
'server': {
|
||||
'port': '8080',
|
||||
'host': '0.0.0.0'
|
||||
},
|
||||
'app': {
|
||||
'root': 'fairy_slipper.controllers.root.RootController',
|
||||
'modules': ['fairy_slipper'],
|
||||
'static_root': '%(confdir)s/public',
|
||||
'api_doc': '%(confdir)s/fairy_slipper/tests/api_doc_fixture',
|
||||
'template_path': '%(confdir)s/templates',
|
||||
'debug': True,
|
||||
'errors': {
|
||||
'404': '/error/404',
|
||||
'__force_dict__': True
|
||||
}
|
||||
}}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.CONFIG = copy.deepcopy(self.CONFIG)
|
||||
super(FunctionalTest, self).__init__(*args, **kwargs)
|
||||
|
||||
def setUp(self):
|
||||
self.app = load_test_app(copy.deepcopy(self.CONFIG))
|
||||
|
||||
def tearDown(self):
|
||||
set_config({}, overwrite=True)
|
|
@ -1,3 +0,0 @@
|
|||
.. swagger:tag:: simple
|
||||
:synopsis: Simple Tag
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
.. http:get:: /
|
||||
:title: Simple route
|
||||
|
||||
:tag: simple
|
||||
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
{
|
||||
"identity/v2/": {
|
||||
"version": "v2",
|
||||
"license": {
|
||||
"url": "http://www.apache.org/licenses/LICENSE-2.0.html",
|
||||
"name": "Apache 2.0"
|
||||
},
|
||||
"service": "identity",
|
||||
"title": "Identity"
|
||||
}
|
||||
}
|
|
@ -1,23 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2010-2011 OpenStack Foundation
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslotest import base
|
||||
|
||||
|
||||
class TestCase(base.BaseTestCase):
|
||||
|
||||
"""Test case base class for all unit tests."""
|
|
@ -1,15 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<chapter>
|
||||
<section xml:id="link-v1">
|
||||
<para>
|
||||
To create a keypair, make a <link
|
||||
xlink:href="http://developer.openstack.org/#createKeypair">
|
||||
create keypair</link> request.
|
||||
</para>
|
||||
<para>
|
||||
To test a link that ends the sentence, make a <link
|
||||
xlink:href="http://developer.openstack.org/#createKeypair">
|
||||
create keypair</link>.
|
||||
</para>
|
||||
</section>
|
||||
</chapter>
|
|
@ -1,25 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<chapter>
|
||||
<section xml:id="listitems-v1">
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Para 1, listitem1</para>
|
||||
<para>Para 2, listitem1</para>
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Embedded item1</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>Embedded item2</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>Embedded item3</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
<para>Para 3, listitem1</para>
|
||||
</listitem>
|
||||
<listitem><para>Para1, listitem2</para></listitem>
|
||||
</itemizedlist>
|
||||
<para>some more para text</para>
|
||||
</section>
|
||||
</chapter>
|
|
@ -1,12 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<chapter>
|
||||
<section xml:id="table-v3">
|
||||
<para>Creates, lists, updates images.</para>
|
||||
<table rules="all" frame="border">
|
||||
<caption>Image status</caption>
|
||||
</table>
|
||||
<table rules="all" frame="border">
|
||||
<caption>Image <emphasis>with embedded bold</emphasis> status</caption>
|
||||
</table>
|
||||
</section>
|
||||
</chapter>
|
|
@ -1,11 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<chapter>
|
||||
<section xml:id="test-v1">
|
||||
<para>Image operations <code>show</code> all fields.</para>
|
||||
<para>Creates, lists, updates, and deletes images.</para>
|
||||
<para>Creates, <emphasis>lists</emphasis>, <code>updates</code>
|
||||
and <emphasis>deletes images</emphasis></para>
|
||||
<para>Creates, (<code>x+5</code>), and deletes <emphasis>
|
||||
images</emphasis>.</para>
|
||||
</section>
|
||||
</chapter>
|
|
@ -1,22 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<chapter>
|
||||
<section xml:id="test-v3">
|
||||
<para>
|
||||
You can encode sets into a blob.
|
||||
Do something with <code>type</code> to
|
||||
<code>application/json</code> and JSON strings in a
|
||||
<code>blob</code>. Example:
|
||||
</para>
|
||||
<programlisting>"blob": {
|
||||
"default": false
|
||||
}</programlisting>
|
||||
<para>
|
||||
Or:
|
||||
</para>
|
||||
<programlisting> "blob": {
|
||||
"foobar_user": [
|
||||
"role:compute-user"
|
||||
]
|
||||
}</programlisting>
|
||||
</section>
|
||||
</chapter>
|
|
@ -1,155 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import os
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
from unittest import TestCase
|
||||
import xml.sax
|
||||
|
||||
from fairy_slipper.cmd import docbkx_to_json
|
||||
|
||||
|
||||
class TestChapterParaParser(TestCase):
|
||||
|
||||
def test_para_inline_code(self):
|
||||
filename = "test-file.xml"
|
||||
test_filename = os.path.dirname(os.path.abspath(__file__))
|
||||
test_filename += "/ch_test-v1.xml"
|
||||
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<book xml:id="test-v1" version="1">
|
||||
<xi:include href="%s"/>
|
||||
</book>
|
||||
""" % (test_filename)
|
||||
|
||||
ch = docbkx_to_json.APIRefContentHandler(filename)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
|
||||
self.assertEqual(
|
||||
ch.tags,
|
||||
[{
|
||||
'name': 'test-v1',
|
||||
'summary': "Image operations ``show`` all fields.\n"
|
||||
"\nCreates, lists, updates, and deletes images.\n"
|
||||
"\nCreates, **lists**, ``updates`` and **deletes images**"
|
||||
"\n\nCreates, (``x+5``), and deletes **images**."}]
|
||||
)
|
||||
|
||||
def test_code_block(self):
|
||||
filename = "test-file.xml"
|
||||
test_filename = os.path.dirname(os.path.abspath(__file__))
|
||||
test_filename += "/ch_test-v3.xml"
|
||||
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<book xml:id="test-v3" version="3">
|
||||
<xi:include href="%s"/>
|
||||
</book>
|
||||
""" % (test_filename)
|
||||
|
||||
ch = docbkx_to_json.APIRefContentHandler(filename)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
|
||||
self.assertEqual(
|
||||
ch.tags,
|
||||
[{
|
||||
'name': 'test-v3',
|
||||
'summary': "You can encode sets into a blob. Do something with ``type`` to\n``application/json`` and JSON strings in a ``blob``. Example:\n\n::\n\n \"blob\": {\n \"default\": false\n }\n\nOr:\n\n::\n\n \"blob\": {\n \"foobar_user\": [\n \"role:compute-user\"\n ]\n }" # noqa
|
||||
}]
|
||||
)
|
||||
|
||||
def test_table_caption(self):
|
||||
filename = "test-file.xml"
|
||||
test_filename = os.path.dirname(os.path.abspath(__file__))
|
||||
test_filename += "/ch_test-table.xml"
|
||||
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<book xml:id="table-v3" version="1">
|
||||
<xi:include href="%s"/>
|
||||
</book>
|
||||
""" % (test_filename)
|
||||
|
||||
ch = docbkx_to_json.APIRefContentHandler(filename)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
|
||||
self.assertEqual(
|
||||
ch.tags,
|
||||
[{
|
||||
'name': 'table-v3',
|
||||
'summary': "Creates, lists, updates images.\n"
|
||||
"\n**Image status**\n\n++\n"
|
||||
"\n**Image with embedded bold status**\n\n++"
|
||||
}]
|
||||
)
|
||||
|
||||
def test_nested_listitems(self):
|
||||
filename = "test-file.xml"
|
||||
test_filename = os.path.dirname(os.path.abspath(__file__))
|
||||
test_filename += "/ch_test-listitems.xml"
|
||||
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<book xml:id="listitems-v1" version="1">
|
||||
<xi:include href="%s"/>
|
||||
</book>
|
||||
""" % (test_filename)
|
||||
|
||||
ch = docbkx_to_json.APIRefContentHandler(filename)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
|
||||
self.assertEqual(
|
||||
ch.tags,
|
||||
[{
|
||||
'name': 'listitems-v1',
|
||||
'summary': "- Para 1, listitem1\n\n Para 2, listitem1\n\n"
|
||||
" - Embedded item1\n\n - Embedded item2\n"
|
||||
"\n - Embedded item3\n\n"
|
||||
" Para 3, listitem1\n\n- Para1, listitem2\n"
|
||||
"\nsome more para text"
|
||||
}]
|
||||
)
|
||||
|
||||
def test_link(self):
|
||||
filename = "test-file.xml"
|
||||
test_filename = os.path.dirname(os.path.abspath(__file__))
|
||||
test_filename += "/ch_test-link.xml"
|
||||
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<book xml:id="link-v1" version="1">
|
||||
<xi:include href="%s"/>
|
||||
</book>
|
||||
""" % (test_filename)
|
||||
|
||||
ch = docbkx_to_json.APIRefContentHandler(filename)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
|
||||
self.assertEqual(
|
||||
ch.tags,
|
||||
[{
|
||||
'name': 'link-v1',
|
||||
'summary': "To create a keypair, make a "
|
||||
"`create keypair"
|
||||
"\n<http://developer.openstack.org/#createKeypair>`_"
|
||||
" request.\n\n"
|
||||
"To test a link that ends the sentence, make a "
|
||||
"`create keypair\n"
|
||||
"<http://developer.openstack.org/#createKeypair>`_."
|
||||
}]
|
||||
)
|
|
@ -1,204 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
import unittest
|
||||
|
||||
from fairy_slipper.cmd import tempest_log
|
||||
|
||||
|
||||
SIMPLE_LOG = """2015-09-04 15:51:29.023 18793 DEBUG tempest_lib.common.rest_client [req-30784c0a-e9a1-4411-a7c1-20715b26598f ] Request (FlavorsV2TestJSON:setUpClass): 200 POST http://192.168.122.201:5000/v2.0/tokens
|
||||
2015-09-04 15:51:29.023 18793 DEBUG tempest_lib.common.rest_client [req-30784c0a-e9a1-4411-a7c1-20715b26598f ] Request - Headers: {}
|
||||
Body: None
|
||||
Response - Headers: {'status': '200', 'content-length': '2987', 'vary': 'X-Auth-Token', 'server': 'Apache/2.4.7 (Ubuntu)', 'connection': 'close', 'date': 'Sun, 13 Sep 2015 07:43:01 GMT', 'content-type': 'application/json', 'x-openstack-request-id': 'req-1'}
|
||||
Body: None
|
||||
2015-09-04 15:51:45.472 18793 INFO tempest_lib.common.rest_client [req-b710aeba-6263-4a49-bf50-2da42227c870 ] Request (FlavorsV2TestJSON:test_get_flavor): 200 POST http://192.168.122.201:5000/v2.0/tokens
|
||||
2015-09-04 15:51:45.472 18793 DEBUG tempest_lib.common.rest_client [req-b710aeba-6263-4a49-bf50-2da42227c870 ] Request - Headers: {}
|
||||
Body: None
|
||||
Response - Headers: {'status': '200', 'content-length': '2987', 'vary': 'X-Auth-Token', 'server': 'Apache/2.4.7 (Ubuntu)', 'connection': 'close', 'date': 'Sun, 13 Sep 2015 07:43:01 GMT', 'content-type': 'application/json', 'x-openstack-request-id': 'req-2'}
|
||||
Body: None
|
||||
""" # noqa
|
||||
|
||||
SIMPLE_LOG_BODY = """2015-09-04 15:51:29.007 18793 INFO tempest_lib.common.rest_client [req-9e329507-e0ce-448c-a363-f49e39dd96b0 ] Request (FlavorsV2TestJSON:test_get_flavor): 200 GET http://192.168.122.201:8774/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1 0.117s
|
||||
2015-09-04 15:51:29.007 18793 DEBUG tempest_lib.common.rest_client [req-9e329507-e0ce-448c-a363-f49e39dd96b0 ] Request - Headers: {'Content-Type': 'application/json', 'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}
|
||||
Body: None
|
||||
Response - Headers: {'status': '200', 'content-length': '430', 'content-location': 'http://192.168.122.201:8774/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1', 'x-compute-request-id': 'req-959a09e8-3628-419d-964a-1be4ca604232', 'vary': 'X-OpenStack-Nova-API-Version', 'connection': 'close', 'x-openstack-nova-api-version': '2.1', 'date': 'Sun, 13 Sep 2015 07:43:01 GMT', 'content-type': 'application/json'}
|
||||
Body: {"flavor": {"name": "m1.tiny", "links": [{"href": "http://192.168.122.201:8774/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1", "rel": "self"}, {"href": "http://192.168.122.201:8774/6b45254f6f7c44a1b65ddb8218932226/flavors/1", "rel": "bookmark"}], "ram": 512, "OS-FLV-DISABLED:disabled": false, "vcpus": 1, "swap": "", "os-flavor-access:is_public": true, "rxtx_factor": 1.0, "OS-FLV-EXT-DATA:ephemeral": 0, "disk": 1, "id": "1"}}
|
||||
""" # noqa
|
||||
|
||||
DEBUG_LOG = """2015-09-04 15:54:42.296 18793 INFO tempest_lib.common.rest_client [req-39c6042e-5a4a-4517-9fe9-32b34cfaa5a8 ] Request (TestSessionsTenantIsolation:test_delete_session_in_env_from_another_tenant): 403 DELETE http://127.0.0.1:8082/v1/environments/7501923609b145ec88eeb4a5c93e371c/sessions/db214e36e0494c4e9dc67fb0df8548f7 0.010s
|
||||
2015-09-04 15:54:42.296 18793 DEBUG tempest_lib.common.rest_client [req-39c6042e-5a4a-4517-9fe9-32b34cfaa5a8 ] Request - Headers: {'Content-Type': 'application/json', 'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}
|
||||
Body: None
|
||||
Response - Headers: {'status': '403', 'content-length': '75', 'connection': 'close', 'date': 'Fri, 04 Sep 2015 15:54:42 GMT', 'content-type': 'text/plain; charset=UTF-8', 'x-openstack-request-id': 'req-39c6042e-5a4a-4517-9fe9-32b34cfaa5a8'}
|
||||
Body: 403 Forbidden
|
||||
|
||||
User is not authorized to access these tenant resources
|
||||
|
||||
_log_request_full /opt/stack/new/tempest/.venv/local/lib/python2.7/site-packages/tempest_lib/common/rest_client.py:411
|
||||
2015-09-04 15:52:13.727 18793 INFO tempest_lib.common.rest_client [req-0ff36a16-dacd-49c8-9835-7ce92d50f5a7 ] Request (TestEnvironmentsTenantIsolation:tearDown): 200 DELETE http://127.0.0.1:8082/v1/environments/c32c6d5095c4476da549ed065e9b5196 0.054s
|
||||
2015-09-04 15:52:13.727 18793 DEBUG tempest_lib.common.rest_client [req-0ff36a16-dacd-49c8-9835-7ce92d50f5a7 ] Request - Headers: {'Content-Type': 'application/json', 'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}
|
||||
Body: None
|
||||
Response - Headers: {'status': '200', 'content-length': '0', 'connection': 'close', 'date': 'Fri, 04 Sep 2015 15:52:13 GMT', 'content-type': 'application/json', 'x-openstack-request-id': 'req-0ff36a16-dacd-49c8-9835-7ce92d50f5a7'}
|
||||
Body: _log_request_full /opt/stack/new/tempest/.venv/local/lib/python2.7/site-packages/tempest_lib/common/rest_client.py:411
|
||||
""" # noqa
|
||||
|
||||
|
||||
DEBUG_LOG_AUTH = """2015-09-04 15:49:46.056 14923 INFO tempest_lib.common.rest_client [req-280bc347-e650-473e-92bb-bcc59103e12c ] Request (main): 200 POST http://127.0.0.1:5000/v2.0/tokens
|
||||
2015-09-04 15:49:46.056 14923 DEBUG tempest_lib.common.rest_client [req-280bc347-e650-473e-92bb-bcc59103e12c ] Request - Headers: {}
|
||||
Body: None
|
||||
Response - Headers: {'server': 'Apache/2.4.7 (Ubuntu)', 'vary': 'X-Auth-Token', 'x-openstack-request-id': 'req-280bc347-e650-473e-92bb-bcc59103e12c', 'content-length': '4846', 'connection': 'close', 'status': '200', 'content-type': 'application/json', 'date': 'Fri, 04 Sep 2015 15:49:42 GMT'}
|
||||
Body: None _log_request_full /opt/stack/new/tempest/.tox/venv/local/lib/python2.7/site-packages/tempest_lib/common/rest_client.py:411
|
||||
""" # noqa
|
||||
|
||||
|
||||
def db_to_call_list(db):
|
||||
calls = []
|
||||
for req in sorted(db.requests):
|
||||
calls.append((db.requests[req], db.responses[req]))
|
||||
return calls
|
||||
|
||||
|
||||
class TestLogParser(unittest.TestCase):
|
||||
maxDiff = 10000
|
||||
|
||||
def test_simple_parse(self):
|
||||
result = db_to_call_list(
|
||||
tempest_log.parse_logfile(StringIO(SIMPLE_LOG)))
|
||||
self.assertEqual(result, [
|
||||
({'url': '/v2.0/tokens',
|
||||
'service': 'identity',
|
||||
'headers': {},
|
||||
'body': None,
|
||||
'method': 'POST'},
|
||||
{'status_code': '200',
|
||||
'body': None,
|
||||
'headers': {'status': '200',
|
||||
'content-length': '0',
|
||||
'date': 'Sun, 13 Sep 2015 07:43:01 GMT',
|
||||
'content-type': 'application/json',
|
||||
'x-openstack-request-id': 'req-1',
|
||||
'vary': 'X-Auth-Token',
|
||||
'connection': 'close',
|
||||
'server': 'Apache/2.4.7 (Ubuntu)'}}),
|
||||
({'url': '/v2.0/tokens',
|
||||
'service': 'identity',
|
||||
'headers': {},
|
||||
'body': None,
|
||||
'method': 'POST'},
|
||||
{'status_code': '200',
|
||||
'body': None,
|
||||
'headers': {'status': '200',
|
||||
'content-length': '0',
|
||||
'date': 'Sun, 13 Sep 2015 07:43:01 GMT',
|
||||
'content-type': 'application/json',
|
||||
'x-openstack-request-id': 'req-2',
|
||||
'vary': 'X-Auth-Token',
|
||||
'connection': 'close',
|
||||
'server': 'Apache/2.4.7 (Ubuntu)'}})])
|
||||
|
||||
def test_body_parse(self):
|
||||
result = db_to_call_list(
|
||||
tempest_log.parse_logfile(StringIO(SIMPLE_LOG_BODY)))
|
||||
|
||||
self.assertEqual(result, [
|
||||
({'url': '/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1',
|
||||
'headers': {'content-type': 'application/json',
|
||||
'content-length': '0',
|
||||
'accept': 'application/json',
|
||||
'x-auth-token': '<omitted>'},
|
||||
'body': None,
|
||||
'method': 'GET',
|
||||
'service': 'compute'},
|
||||
{'body': '{\n "flavor": {\n "OS-FLV-DISABLED:disabled": false,\n "OS-FLV-EXT-DATA:ephemeral": 0,\n "disk": 1,\n "id": "1",\n "links": [\n {\n "href": "http://192.168.122.201:8774/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1",\n "rel": "self"\n },\n {\n "href": "http://192.168.122.201:8774/6b45254f6f7c44a1b65ddb8218932226/flavors/1",\n "rel": "bookmark"\n }\n ],\n "name": "m1.tiny",\n "os-flavor-access:is_public": true,\n "ram": 512,\n "rxtx_factor": 1.0,\n "swap": "",\n "vcpus": 1\n }\n}', # noqa
|
||||
'status_code': '200',
|
||||
'headers': {'status': '200', 'content-length': '548',
|
||||
'content-location': 'http://192.168.122.201:8774/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1', # noqa
|
||||
'x-openstack-nova-api-version': '2.1',
|
||||
'date': 'Sun, 13 Sep 2015 07:43:01 GMT',
|
||||
'vary': 'X-OpenStack-Nova-API-Version',
|
||||
'x-compute-request-id': 'req-959a09e8-3628-419d-964a-1be4ca604232', # noqa
|
||||
'content-type': 'application/json',
|
||||
'connection': 'close'}})])
|
||||
|
||||
def test_debug_log(self):
|
||||
result = db_to_call_list(
|
||||
tempest_log.parse_logfile(StringIO(DEBUG_LOG)))
|
||||
|
||||
self.assertEqual(result, [
|
||||
({'body': None,
|
||||
'headers': {'accept': 'application/json',
|
||||
'content-type': 'application/json',
|
||||
'content-length': '0',
|
||||
'x-auth-token': '<omitted>'},
|
||||
'method': 'DELETE',
|
||||
'service': 'application-catalog',
|
||||
'url': '/v1/environments/c32c6d5095c4476da549ed065e9b5196'},
|
||||
{'body': None,
|
||||
'headers': {'connection': 'close',
|
||||
'content-length': '0',
|
||||
'content-type': 'application/json',
|
||||
'date': 'Fri, 04 Sep 2015 15:52:13 GMT',
|
||||
'status': '200',
|
||||
'x-openstack-request-id':
|
||||
'req-0ff36a16-dacd-49c8-9835-7ce92d50f5a7'},
|
||||
'status_code': '200'}),
|
||||
({'body': None,
|
||||
'headers': {'accept': 'application/json',
|
||||
'content-type': 'application/json',
|
||||
'content-length': '0',
|
||||
'x-auth-token': '<omitted>'},
|
||||
'method': 'DELETE',
|
||||
'service': 'application-catalog',
|
||||
'url': '/v1/environments/7501923609b145ec88eeb4a5c93e371c'
|
||||
'/sessions/db214e36e0494c4e9dc67fb0df8548f7'},
|
||||
{'body': '403 Forbidden\n'
|
||||
'User is not authorized to access these tenant resources\n\n',
|
||||
'headers': {'connection': 'close',
|
||||
'content-length': '13',
|
||||
'content-type': 'text/plain; charset=UTF-8',
|
||||
'date': 'Fri, 04 Sep 2015 15:54:42 GMT',
|
||||
'status': '403',
|
||||
'x-openstack-request-id':
|
||||
'req-39c6042e-5a4a-4517-9fe9-32b34cfaa5a8'},
|
||||
'status_code': '403'})])
|
||||
|
||||
def test_debug_admin_log(self):
|
||||
result = db_to_call_list(
|
||||
tempest_log.parse_logfile(StringIO(DEBUG_LOG_AUTH)))
|
||||
|
||||
self.assertEqual(result, [
|
||||
({'body': None,
|
||||
'headers': {},
|
||||
'method': 'POST',
|
||||
'service': 'identity',
|
||||
'url': '/v2.0/tokens'},
|
||||
{'body': None,
|
||||
'headers': {'connection': 'close',
|
||||
'content-length': '0',
|
||||
'content-type': 'application/json',
|
||||
'date': 'Fri, 04 Sep 2015 15:49:42 GMT',
|
||||
'server': 'Apache/2.4.7 (Ubuntu)',
|
||||
'status': '200',
|
||||
'vary': 'X-Auth-Token',
|
||||
'x-openstack-request-id':
|
||||
'req-280bc347-e650-473e-92bb-bcc59103e12c'},
|
||||
'status_code': '200'})])
|
|
@ -1,595 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
import unittest
|
||||
import xml.sax
|
||||
|
||||
from fairy_slipper.cmd import wadl_to_swagger
|
||||
|
||||
|
||||
class MockParent(object):
|
||||
result = None
|
||||
rest = None
|
||||
|
||||
def detach_subparser(self, result, **kwargs):
|
||||
self.result = result
|
||||
self.kwargs = kwargs
|
||||
|
||||
|
||||
class TestParaParser(unittest.TestCase):
|
||||
|
||||
def test_code_block(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This is an example request:</para>
|
||||
<programlisting>GET /v2.0/routers/{router_id}
|
||||
Accept: application/json</programlisting>
|
||||
<para>para2</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This is an example request:
|
||||
|
||||
::
|
||||
|
||||
GET /v2.0/routers/{router_id}
|
||||
Accept: application/json
|
||||
|
||||
para2
|
||||
|
||||
""")
|
||||
|
||||
def test_code_block_language(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This is an example request:</para>
|
||||
<programlisting language="json">"OS-OAUTH1": {
|
||||
"access_token_id": "cce0b8be7"
|
||||
}</programlisting>
|
||||
<para>para2</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This is an example request:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"OS-OAUTH1": {
|
||||
"access_token_id": "cce0b8be7"
|
||||
}
|
||||
|
||||
para2
|
||||
|
||||
""")
|
||||
|
||||
def test_link(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>
|
||||
To create a keypair, make a <link
|
||||
xlink:href="http://developer.openstack.org/#createKeypair">
|
||||
create keypair</link> request.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"To create a keypair, make a "
|
||||
"`create keypair\n"
|
||||
"<http://developer.openstack.org/#createKeypair>`_"
|
||||
" request.\n\n")
|
||||
|
||||
def test_anonymous_link(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>
|
||||
To create a keypair, see <link
|
||||
xlink:href="http://developer.openstack.org/#createKeypair"></link> this link.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
""" # noqa
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"To create a keypair, see\n"
|
||||
"`<http://developer.openstack.org/#createKeypair>`__"
|
||||
" this link.\n\n")
|
||||
|
||||
def test_para_inline_code(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>Code sample <code>admin</code></para>
|
||||
<para><code>get task</code>, program!</para>
|
||||
<para><code>get task</code> program!</para>
|
||||
<para>Code sample <code>admin</code> and more code.</para>
|
||||
<para>Code <code>admin</code>.</para>
|
||||
<para>Code <code>admin</code>. Another sentence started.</para>
|
||||
<para>para5</para>
|
||||
<para>Code <code>test</code>: with colon</para>
|
||||
<para>Code <code>test</code>; with semi-colon</para>
|
||||
<para>Code <code>test</code>; with semi-colon and <code>
|
||||
end test</code>.</para>
|
||||
<para>Code (<code>test</code>) with parens.</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""Code sample ``admin``
|
||||
|
||||
``get task``, program!
|
||||
|
||||
``get task`` program!
|
||||
|
||||
Code sample ``admin`` and more code.
|
||||
|
||||
Code ``admin``.
|
||||
|
||||
Code ``admin``. Another sentence started.
|
||||
|
||||
para5
|
||||
|
||||
Code ``test``: with colon
|
||||
|
||||
Code ``test``; with semi-colon
|
||||
|
||||
Code ``test``; with semi-colon and ``end test``.
|
||||
|
||||
Code (``test``) with parens.
|
||||
|
||||
""")
|
||||
|
||||
def test_para_emphasis(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>Some text with <emphasis>bold words at the end of the sentence</emphasis>.</para>
|
||||
<para><emphasis>Bold text is cool</emphasis></para>
|
||||
<para>Multi word text is <emphasis>brilliant</emphasis>and then some.</para>
|
||||
<para>Really <emphasis>bold</emphasis></para>
|
||||
<para>para5</para>
|
||||
<para>This is a <emphasis>sentence</emphasis>; another phrase follows.</para>
|
||||
<para>Text with (<emphasis>parenthesis</emphasis>) and then **text**: with colon.</para>
|
||||
<para>A <emphasis>comma</emphasis>, and more <code>text</code>.</para>
|
||||
<para>Some text with <emphasis>back</emphasis> <emphasis>to back</emphasis>
|
||||
emphasized text.</para>
|
||||
</wadl:doc>
|
||||
""" # noqa
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""Some text with **bold words at the end of the sentence**.
|
||||
|
||||
**Bold text is cool**
|
||||
|
||||
Multi word text is **brilliant** and then some.
|
||||
|
||||
Really **bold**
|
||||
|
||||
para5
|
||||
|
||||
This is a **sentence**; another phrase follows.
|
||||
|
||||
Text with (**parenthesis**) and then **text**: with colon.
|
||||
|
||||
A **comma**, and more ``text``.
|
||||
|
||||
Some text with **back** **to back** emphasized text.
|
||||
|
||||
""")
|
||||
|
||||
def test_listitem_para(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This operation does not accept a request body.</para>
|
||||
<para>Example requests and responses:</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Show account details and list containers:</para>
|
||||
<para>Some more details.</para>
|
||||
</listitem>
|
||||
<listitem><para>See the example response below.</para></listitem>
|
||||
</itemizedlist>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This operation does not accept a request body.
|
||||
\nExample requests and responses:
|
||||
\n- Show account details and list containers:
|
||||
\n Some more details.
|
||||
\n- See the example response below.\n\n"""
|
||||
)
|
||||
|
||||
def test_nested_listitem(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Para 1, listitem1</para>
|
||||
<para>Para 2, listitem1</para>
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Embedded item1</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>Embedded item2</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>Embedded item3</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
<para>Para 3, listitem1</para>
|
||||
</listitem>
|
||||
<listitem><para>Para1, listitem2</para></listitem>
|
||||
</itemizedlist>
|
||||
<para>Another para of text</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""- Para 1, listitem1\n\n Para 2, listitem1\n\n - Embedded item1
|
||||
\n - Embedded item2\n\n - Embedded item3\n\n Para 3, listitem1
|
||||
\n- Para1, listitem2\n\nAnother para of text\n\n"""
|
||||
)
|
||||
|
||||
def test_listitem_para_code(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This operation does not accept a request body.</para>
|
||||
<para>Example requests and responses:</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Show account details and list containers:</para>
|
||||
<para><code>curl -i
|
||||
$publicURL?format=json -X GET -H
|
||||
"X-Auth-Token: $token"</code></para>
|
||||
<para>See the example response below.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This operation does not accept a request body.
|
||||
\nExample requests and responses:
|
||||
\n- Show account details and list containers:
|
||||
\n ``curl -i $publicURL?format=json -X GET -H \"X-Auth-Token:\n $token\"``
|
||||
\n See the example response below.\n\n"""
|
||||
)
|
||||
|
||||
def test_listitem_para_programlisting(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>Delete the <code>steven</code>container:</para>
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Container command:</para>
|
||||
<para>
|
||||
<code>curl -i $publicURL/steven
|
||||
-X DELETE -H "X-Auth-Token: $token"</code>
|
||||
</para>
|
||||
<para>If the container does not exist, the response is:</para>
|
||||
<para><programlisting>HTTP/1.1 404 Not Found
|
||||
Content-Length: 70
|
||||
Content-Type: text/html; charset=UTF-8
|
||||
Date: Thu, 16 Jan 2014 18:00:20 GMT
|
||||
<html>
|
||||
<h1>Conflict</h1>
|
||||
<p>Trying to complete your request.</p></html>
|
||||
</programlisting></para>
|
||||
</listitem>
|
||||
<listitem><para>Second container command:</para><para>Write to disk.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""Delete the ``steven`` container:
|
||||
\n- Container command:
|
||||
\n ``curl -i $publicURL/steven -X DELETE -H \"X-Auth-Token: $token\"``
|
||||
\n If the container does not exist, the response is:
|
||||
\n ::\n\n HTTP/1.1 404 Not Found\n Content-Length: 70
|
||||
Content-Type: text/html; charset=UTF-8
|
||||
Date: Thu, 16 Jan 2014 18:00:20 GMT
|
||||
<html>\n <h1>Conflict\n </h1>
|
||||
<p>Trying to complete your request.\n </p>
|
||||
</html>\n\n\n- Second container command:
|
||||
\n Write to disk.\n\n"""
|
||||
)
|
||||
|
||||
def test_para_code_block(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This is an example request:</para>
|
||||
<para><programlisting>GET /v2.0/routers/{router_id}
|
||||
Accept: application/json</programlisting></para>
|
||||
<para>para2</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This is an example request:
|
||||
|
||||
::
|
||||
|
||||
GET /v2.0/routers/{router_id}
|
||||
Accept: application/json
|
||||
|
||||
|
||||
para2
|
||||
|
||||
""")
|
||||
|
||||
def test_para_code_block_language(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This is an example request:</para>
|
||||
<para><programlisting language="json">"OS-OAUTH1": {
|
||||
"access_token_id": "cce0b8be7"
|
||||
}</programlisting></para>
|
||||
<para>para2</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This is an example request:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"OS-OAUTH1": {
|
||||
"access_token_id": "cce0b8be7"
|
||||
}
|
||||
|
||||
|
||||
para2
|
||||
|
||||
""")
|
||||
|
||||
def test_listitem_all_in_one_para(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>Example requests and responses:</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Copy the <code>goodbye</code> object
|
||||
from the <code>marktwain</code> container to
|
||||
the <code>janeausten</code> container:
|
||||
<code>curl -i $publicURL/marktwain/goodbye
|
||||
-X COPY -H "X-Auth-Token: $token" -H
|
||||
"Destination: janeausten/goodbye"</code>
|
||||
<programlisting>HTTP/1.1 201 Created
|
||||
Content-Length: 0
|
||||
X-Copied-From: marktwain/goodbye
|
||||
</programlisting></para></listitem>
|
||||
</itemizedlist>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""Example requests and responses:
|
||||
\n- Copy the ``goodbye`` object from the ``marktwain`` container to
|
||||
the ``janeausten`` container: ``curl -i
|
||||
$publicURL/marktwain/goodbye -X COPY -H "X-Auth-Token: $token" -H
|
||||
"Destination: janeausten/goodbye"`` ::\n\n HTTP/1.1 201 Created
|
||||
Content-Length: 0\n X-Copied-From: marktwain/goodbye\n\n\n"""
|
||||
)
|
||||
|
||||
def test_table_caption(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<table><caption>Image status</caption></table>
|
||||
<table><caption>Image <code>with a code literal</code>
|
||||
<code>inside</code></caption></table>
|
||||
<table>
|
||||
<caption>A <emphasis>bold</emphasis> caption <emphasis>again</emphasis>
|
||||
</caption></table>
|
||||
<table>
|
||||
<caption>
|
||||
<emphasis role="italic">An italicized</emphasis> caption</caption>
|
||||
</table>
|
||||
<table>
|
||||
<caption>A caption with <emphasis>bold</emphasis> text embedded</caption>
|
||||
</table>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""**Image status**
|
||||
|
||||
++
|
||||
|
||||
**Image with a code literal inside**
|
||||
|
||||
++
|
||||
|
||||
**A bold caption again**
|
||||
|
||||
++
|
||||
|
||||
**An italicized caption**
|
||||
|
||||
++
|
||||
|
||||
**A caption with bold text embedded**
|
||||
|
||||
++
|
||||
|
||||
""")
|
||||
|
||||
|
||||
class TestWADLHandler(unittest.TestCase):
|
||||
|
||||
def test_simple_wadl(self):
|
||||
filename = "api-v1.wadl"
|
||||
api_ref = {
|
||||
"file_tags": {filename: 'things'},
|
||||
"method_tags": {
|
||||
},
|
||||
"resource_tags": {
|
||||
},
|
||||
"service": "lorem",
|
||||
"tags": [],
|
||||
"title": "Lorem Ipsum",
|
||||
"version": "v1"
|
||||
}
|
||||
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<application>
|
||||
<resources xml:id="os-attach-v2">
|
||||
<resource id="version" type="#VersionDetails" path="v2/">
|
||||
<method href="#createThing" />
|
||||
</resource>
|
||||
</resources>
|
||||
<method name="POST" id="createThing">
|
||||
<wadl:doc title="Create interface">
|
||||
<para role="shortdesc">Creates and uses a port interface
|
||||
to attach the port to a server instance.</para>
|
||||
</wadl:doc>
|
||||
<request>
|
||||
<representation mediaType="application/json">
|
||||
<param name="thing-a-imagig" style="plain"
|
||||
type="xsd:string" required="true">
|
||||
<wadl:doc>
|
||||
<para>
|
||||
Specify the <code>interfaceAttachment</code>
|
||||
action in the request body.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
</param>
|
||||
</representation>
|
||||
</request>
|
||||
<response status="202">
|
||||
<representation mediaType="application/json">
|
||||
<param name="thing-a-imagig-response" style="plain"
|
||||
type="xsd:string" required="true">
|
||||
<wadl:doc>
|
||||
<para>
|
||||
Specify the <code>interfaceAttachment</code>
|
||||
action in the request body.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
</param>
|
||||
</representation>
|
||||
</response>
|
||||
</method>
|
||||
</application>
|
||||
"""
|
||||
|
||||
ch = wadl_to_swagger.WADLHandler(filename, api_ref)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
|
||||
self.assertEqual(
|
||||
ch.apis,
|
||||
{'v2/':
|
||||
[{'consumes': [],
|
||||
'description': '',
|
||||
'examples': {},
|
||||
'operationId': 'createThing',
|
||||
'method': 'post',
|
||||
'parameters': [
|
||||
{'description': '',
|
||||
'in': 'body',
|
||||
'name': 'body',
|
||||
'required': False,
|
||||
'schema': {'$ref':
|
||||
'#/definitions/createThing'}}],
|
||||
'produces': [],
|
||||
'responses': {'202': {'examples': {},
|
||||
'headers': {},
|
||||
'schema': {
|
||||
'$ref':
|
||||
'#/definitions/createThing_202'},
|
||||
'description': ''}},
|
||||
'summary': 'Creates and uses a port interface '
|
||||
'to attach the port to a server instance.',
|
||||
'tags': ['things'],
|
||||
'title': 'Create interface'}]})
|
||||
|
||||
self.assertEqual(
|
||||
ch.schemas,
|
||||
{'createThing':
|
||||
{'properties':
|
||||
{'thing-a-imagig':
|
||||
{'description':
|
||||
'Specify the ``interfaceAttachment``'
|
||||
' action in the request body.',
|
||||
'format': '',
|
||||
'required': True,
|
||||
'type': 'string'}},
|
||||
'type': 'object'},
|
||||
'createThing_202':
|
||||
{'properties':
|
||||
{'thing-a-imagig-response':
|
||||
{'description':
|
||||
'Specify the ``interfaceAttachment``'
|
||||
' action in the request body.',
|
||||
'format': '',
|
||||
'required': True,
|
||||
'type': 'string'}},
|
||||
'type': 'object'}}
|
||||
)
|
|
@ -1,873 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
import unittest
|
||||
import xml.sax
|
||||
|
||||
from fairy_slipper.cmd import wadl_to_swagger_valid
|
||||
|
||||
|
||||
class MockParent(object):
|
||||
result = None
|
||||
rest = None
|
||||
|
||||
def detach_subparser(self, result, **kwargs):
|
||||
self.result = result
|
||||
self.kwargs = kwargs
|
||||
|
||||
|
||||
class TestParaParser(unittest.TestCase):
|
||||
|
||||
def test_code_block(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This is an example request:</para>
|
||||
<programlisting>GET /v2.0/routers/{router_id}
|
||||
Accept: application/json</programlisting>
|
||||
<para>para2</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This is an example request:
|
||||
|
||||
::
|
||||
|
||||
GET /v2.0/routers/{router_id}
|
||||
Accept: application/json
|
||||
|
||||
para2
|
||||
|
||||
""")
|
||||
|
||||
def test_code_block_language(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This is an example request:</para>
|
||||
<programlisting language="json">"OS-OAUTH1": {
|
||||
"access_token_id": "cce0b8be7"
|
||||
}</programlisting>
|
||||
<para>para2</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This is an example request:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"OS-OAUTH1": {
|
||||
"access_token_id": "cce0b8be7"
|
||||
}
|
||||
|
||||
para2
|
||||
|
||||
""")
|
||||
|
||||
def test_link(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>
|
||||
To create a keypair, make a <link
|
||||
xlink:href="http://developer.openstack.org/#createKeypair">
|
||||
create keypair</link> request.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"To create a keypair, make a "
|
||||
"`create keypair\n"
|
||||
"<http://developer.openstack.org/#createKeypair>`_"
|
||||
" request.\n\n")
|
||||
|
||||
def test_anonymous_link(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>
|
||||
To create a keypair, see <link
|
||||
xlink:href="http://developer.openstack.org/#createKeypair"></link> this link.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
""" # noqa
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"To create a keypair, see\n"
|
||||
"`<http://developer.openstack.org/#createKeypair>`__"
|
||||
" this link.\n\n")
|
||||
|
||||
def test_para_inline_code(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>Code sample <code>admin</code></para>
|
||||
<para><code>get task</code>, program!</para>
|
||||
<para><code>get task</code> program!</para>
|
||||
<para>Code sample <code>admin</code> and more code.</para>
|
||||
<para>Code <code>admin</code>.</para>
|
||||
<para>Code <code>admin</code>. Another sentence started.</para>
|
||||
<para>para5</para>
|
||||
<para>Code <code>test</code>: with colon</para>
|
||||
<para>Code <code>test</code>; with semi-colon</para>
|
||||
<para>Code <code>test</code>; with semi-colon and <code>
|
||||
end test</code>.</para>
|
||||
<para>Code (<code>test</code>) with parens.</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""Code sample ``admin``
|
||||
|
||||
``get task``, program!
|
||||
|
||||
``get task`` program!
|
||||
|
||||
Code sample ``admin`` and more code.
|
||||
|
||||
Code ``admin``.
|
||||
|
||||
Code ``admin``. Another sentence started.
|
||||
|
||||
para5
|
||||
|
||||
Code ``test``: with colon
|
||||
|
||||
Code ``test``; with semi-colon
|
||||
|
||||
Code ``test``; with semi-colon and ``end test``.
|
||||
|
||||
Code (``test``) with parens.
|
||||
|
||||
""")
|
||||
|
||||
def test_para_emphasis(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>Some text with <emphasis>bold words at the end of the sentence</emphasis>.</para>
|
||||
<para><emphasis>Bold text is cool</emphasis></para>
|
||||
<para>Multi word text is <emphasis>brilliant</emphasis>and then some.</para>
|
||||
<para>Really <emphasis>bold</emphasis></para>
|
||||
<para>para5</para>
|
||||
<para>This is a <emphasis>sentence</emphasis>; another phrase follows.</para>
|
||||
<para>Text with (<emphasis>parenthesis</emphasis>) and then **text**: with colon.</para>
|
||||
<para>A <emphasis>comma</emphasis>, and more <code>text</code>.</para>
|
||||
<para>Some text with <emphasis>back</emphasis> <emphasis>to back</emphasis>
|
||||
emphasized text.</para>
|
||||
</wadl:doc>
|
||||
""" # noqa
|
||||
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""Some text with **bold words at the end of the sentence**.
|
||||
|
||||
**Bold text is cool**
|
||||
|
||||
Multi word text is **brilliant** and then some.
|
||||
|
||||
Really **bold**
|
||||
|
||||
para5
|
||||
|
||||
This is a **sentence**; another phrase follows.
|
||||
|
||||
Text with (**parenthesis**) and then **text**: with colon.
|
||||
|
||||
A **comma**, and more ``text``.
|
||||
|
||||
Some text with **back** **to back** emphasized text.
|
||||
|
||||
""")
|
||||
|
||||
def test_listitem_para(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This operation does not accept a request body.</para>
|
||||
<para>Example requests and responses:</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Show account details and list containers:</para>
|
||||
<para>Some more details.</para>
|
||||
</listitem>
|
||||
<listitem><para>See the example response below.</para></listitem>
|
||||
</itemizedlist>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This operation does not accept a request body.
|
||||
\nExample requests and responses:
|
||||
\n- Show account details and list containers:
|
||||
\n Some more details.
|
||||
\n- See the example response below.\n\n"""
|
||||
)
|
||||
|
||||
def test_nested_listitem(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Para 1, listitem1</para>
|
||||
<para>Para 2, listitem1</para>
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Embedded item1</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>Embedded item2</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>Embedded item3</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
<para>Para 3, listitem1</para>
|
||||
</listitem>
|
||||
<listitem><para>Para1, listitem2</para></listitem>
|
||||
</itemizedlist>
|
||||
<para>Another para of text</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""- Para 1, listitem1\n\n Para 2, listitem1\n\n - Embedded item1
|
||||
\n - Embedded item2\n\n - Embedded item3\n\n Para 3, listitem1
|
||||
\n- Para1, listitem2\n\nAnother para of text\n\n"""
|
||||
)
|
||||
|
||||
def test_listitem_para_code(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This operation does not accept a request body.</para>
|
||||
<para>Example requests and responses:</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Show account details and list containers:</para>
|
||||
<para><code>curl -i
|
||||
$publicURL?format=json -X GET -H
|
||||
"X-Auth-Token: $token"</code></para>
|
||||
<para>See the example response below.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This operation does not accept a request body.
|
||||
\nExample requests and responses:
|
||||
\n- Show account details and list containers:
|
||||
\n ``curl -i $publicURL?format=json -X GET -H \"X-Auth-Token:\n $token\"``
|
||||
\n See the example response below.\n\n"""
|
||||
)
|
||||
|
||||
def test_listitem_para_programlisting(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>Delete the <code>steven</code>container:</para>
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>Container command:</para>
|
||||
<para>
|
||||
<code>curl -i $publicURL/steven
|
||||
-X DELETE -H "X-Auth-Token: $token"</code>
|
||||
</para>
|
||||
<para>If the container does not exist, the response is:</para>
|
||||
<para><programlisting>HTTP/1.1 404 Not Found
|
||||
Content-Length: 70
|
||||
Content-Type: text/html; charset=UTF-8
|
||||
Date: Thu, 16 Jan 2014 18:00:20 GMT
|
||||
<html>
|
||||
<h1>Conflict</h1>
|
||||
<p>Trying to complete your request.</p></html>
|
||||
</programlisting></para>
|
||||
</listitem>
|
||||
<listitem><para>Second container command:</para><para>Write to disk.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""Delete the ``steven`` container:
|
||||
\n- Container command:
|
||||
\n ``curl -i $publicURL/steven -X DELETE -H \"X-Auth-Token: $token\"``
|
||||
\n If the container does not exist, the response is:
|
||||
\n ::\n\n HTTP/1.1 404 Not Found\n Content-Length: 70
|
||||
Content-Type: text/html; charset=UTF-8
|
||||
Date: Thu, 16 Jan 2014 18:00:20 GMT
|
||||
<html>\n <h1>Conflict\n </h1>
|
||||
<p>Trying to complete your request.\n </p>
|
||||
</html>\n\n\n- Second container command:
|
||||
\n Write to disk.\n\n"""
|
||||
)
|
||||
|
||||
def test_para_code_block(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This is an example request:</para>
|
||||
<para><programlisting>GET /v2.0/routers/{router_id}
|
||||
Accept: application/json</programlisting></para>
|
||||
<para>para2</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This is an example request:
|
||||
|
||||
::
|
||||
|
||||
GET /v2.0/routers/{router_id}
|
||||
Accept: application/json
|
||||
|
||||
|
||||
para2
|
||||
|
||||
""")
|
||||
|
||||
def test_para_code_block_language(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>This is an example request:</para>
|
||||
<para><programlisting language="json">"OS-OAUTH1": {
|
||||
"access_token_id": "cce0b8be7"
|
||||
}</programlisting></para>
|
||||
<para>para2</para>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""This is an example request:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"OS-OAUTH1": {
|
||||
"access_token_id": "cce0b8be7"
|
||||
}
|
||||
|
||||
|
||||
para2
|
||||
|
||||
""")
|
||||
|
||||
def test_listitem_all_in_one_para(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<para>Example requests and responses:</para>
|
||||
<itemizedlist>
|
||||
<listitem><para>Copy the <code>goodbye</code> object
|
||||
from the <code>marktwain</code> container to
|
||||
the <code>janeausten</code> container:
|
||||
<code>curl -i $publicURL/marktwain/goodbye
|
||||
-X COPY -H "X-Auth-Token: $token" -H
|
||||
"Destination: janeausten/goodbye"</code>
|
||||
<programlisting>HTTP/1.1 201 Created
|
||||
Content-Length: 0
|
||||
X-Copied-From: marktwain/goodbye
|
||||
</programlisting></para></listitem>
|
||||
</itemizedlist>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""Example requests and responses:
|
||||
\n- Copy the ``goodbye`` object from the ``marktwain`` container to
|
||||
the ``janeausten`` container: ``curl -i
|
||||
$publicURL/marktwain/goodbye -X COPY -H "X-Auth-Token: $token" -H
|
||||
"Destination: janeausten/goodbye"`` ::\n\n HTTP/1.1 201 Created
|
||||
Content-Length: 0\n X-Copied-From: marktwain/goodbye\n\n\n"""
|
||||
)
|
||||
|
||||
def test_table_caption(self):
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wadl:doc>
|
||||
<table><caption>Image status</caption></table>
|
||||
<table><caption>Image <code>with a code literal</code>
|
||||
<code>inside</code></caption></table>
|
||||
<table>
|
||||
<caption>A <emphasis>bold</emphasis> caption <emphasis>again</emphasis>
|
||||
</caption></table>
|
||||
<table>
|
||||
<caption>
|
||||
<emphasis role="italic">An italicized</emphasis> caption</caption>
|
||||
</table>
|
||||
<table>
|
||||
<caption>A caption with <emphasis>bold</emphasis> text embedded</caption>
|
||||
</table>
|
||||
</wadl:doc>
|
||||
"""
|
||||
parent = MockParent()
|
||||
ch = wadl_to_swagger_valid.ParaParser(parent)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
self.assertEqual(
|
||||
parent.result,
|
||||
"""**Image status**
|
||||
|
||||
++
|
||||
|
||||
**Image with a code literal inside**
|
||||
|
||||
++
|
||||
|
||||
**A bold caption again**
|
||||
|
||||
++
|
||||
|
||||
**An italicized caption**
|
||||
|
||||
++
|
||||
|
||||
**A caption with bold text embedded**
|
||||
|
||||
++
|
||||
|
||||
""")
|
||||
|
||||
|
||||
class TestWADLHandler(unittest.TestCase):
|
||||
|
||||
def test_simple_wadl(self):
|
||||
filename = "api-v1.wadl"
|
||||
api_ref = {
|
||||
"file_tags": {filename: 'things'},
|
||||
"method_tags": {
|
||||
},
|
||||
"resource_tags": {
|
||||
},
|
||||
"service": "lorem",
|
||||
"tags": [],
|
||||
"title": "Lorem Ipsum",
|
||||
"version": "v1"
|
||||
}
|
||||
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<application>
|
||||
<resources xml:id="os-attach-v2">
|
||||
<resource id="version" type="#VersionDetails" path="/v2">
|
||||
<method href="#createThing" />
|
||||
</resource>
|
||||
</resources>
|
||||
<method name="POST" id="createThing">
|
||||
<wadl:doc title="Create interface">
|
||||
<para role="shortdesc">Create a port interface.</para>
|
||||
</wadl:doc>
|
||||
<request>
|
||||
<representation mediaType="application/json">
|
||||
<param name="thing-a-imagig" style="plain"
|
||||
type="xsd:string" required="true">
|
||||
<wadl:doc>
|
||||
<para>
|
||||
Specify the <code>interfaceAttachment</code>
|
||||
action in the request body.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
</param>
|
||||
</representation>
|
||||
</request>
|
||||
<response status="202">
|
||||
<representation mediaType="application/json">
|
||||
<param name="thing-a-imagig-response" style="plain"
|
||||
type="xsd:string" required="true">
|
||||
<wadl:doc>
|
||||
<para>
|
||||
Specify the <code>interfaceAttachment</code>
|
||||
action in the request body.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
</param>
|
||||
</representation>
|
||||
</response>
|
||||
</method>
|
||||
</application>
|
||||
"""
|
||||
|
||||
ch = wadl_to_swagger_valid.WADLHandler(filename, api_ref)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
|
||||
self.assertEqual(
|
||||
ch.apis,
|
||||
{'/v2':
|
||||
[{'method': 'post',
|
||||
'consumes': [],
|
||||
'description': '',
|
||||
'tags': ['things'],
|
||||
'x-title': 'Create interface',
|
||||
'operationId': 'createThing',
|
||||
'parameters': [
|
||||
{'description': '',
|
||||
'in': 'body',
|
||||
'name': 'body',
|
||||
'required': False,
|
||||
'schema': {'$ref':
|
||||
'#/definitions/createThing'}}],
|
||||
'produces': [],
|
||||
'responses': {'202': {'examples': {},
|
||||
'headers': {},
|
||||
'schema': {
|
||||
'$ref':
|
||||
'#/definitions/createThing_202'},
|
||||
'description': ''}},
|
||||
'summary': 'Create a port interface.'}]})
|
||||
|
||||
# api structure differs from final swagger file output
|
||||
self.assertEqual(
|
||||
ch.schemas,
|
||||
{'createThing':
|
||||
{'required': ['thing-a-imagig'],
|
||||
'properties':
|
||||
{'thing-a-imagig':
|
||||
{'description':
|
||||
'Specify the ``interfaceAttachment``'
|
||||
' action in the request body.',
|
||||
'format': '',
|
||||
'type': 'string'}},
|
||||
'type': 'object'},
|
||||
'createThing_202':
|
||||
{'required': ['thing-a-imagig-response'],
|
||||
'properties':
|
||||
{'thing-a-imagig-response':
|
||||
{'description':
|
||||
'Specify the ``interfaceAttachment``'
|
||||
' action in the request body.',
|
||||
'format': '',
|
||||
'type': 'string'}},
|
||||
'type': 'object'}}
|
||||
)
|
||||
|
||||
def test_wadl_copy_method(self):
|
||||
filename = "api-v1.wadl"
|
||||
api_ref = {
|
||||
"file_tags": {filename: 'things'},
|
||||
"method_tags": {
|
||||
},
|
||||
"resource_tags": {
|
||||
},
|
||||
"service": "lorem",
|
||||
"tags": [],
|
||||
"title": "Lorem Ipsum",
|
||||
"version": "v1"
|
||||
}
|
||||
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<application>
|
||||
<resources xml:id="os-attach-v2">
|
||||
<resource id="version" type="#VersionDetails" path="/v2">
|
||||
<method href="#copyThing" />
|
||||
</resource>
|
||||
</resources>
|
||||
<method name="COPY" id="copyThing">
|
||||
<wadl:doc title="Copy interface">
|
||||
<para role="shortdesc">Copy a port interface.</para>
|
||||
</wadl:doc>
|
||||
<request>
|
||||
<representation mediaType="application/json">
|
||||
<param name="copy-request" style="plain"
|
||||
type="xsd:string" required="true">
|
||||
<wadl:doc>
|
||||
<para>
|
||||
Copy the <code>interfaceAttachment</code>.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
</param>
|
||||
</representation>
|
||||
</request>
|
||||
<response status="202">
|
||||
<representation mediaType="application/json">
|
||||
<param name="copy-response" style="plain"
|
||||
type="xsd:string" required="true">
|
||||
<wadl:doc>
|
||||
<para>
|
||||
Copy response text.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
</param>
|
||||
</representation>
|
||||
</response>
|
||||
</method>
|
||||
</application>
|
||||
"""
|
||||
|
||||
ch = wadl_to_swagger_valid.WADLHandler(filename, api_ref)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
|
||||
# api structure differs from final swagger file output
|
||||
self.assertEqual(
|
||||
ch.apis,
|
||||
{'/v2':
|
||||
[{'method': 'x-copy',
|
||||
'consumes': [],
|
||||
'description': '',
|
||||
'tags': ['things'],
|
||||
'x-title': 'Copy interface',
|
||||
'operationId': 'copyThing',
|
||||
'parameters': [
|
||||
{
|
||||
'description': '',
|
||||
'in': 'body',
|
||||
'name': 'body',
|
||||
'required': False,
|
||||
'schema': {'$ref': '#/definitions/copyThing'}
|
||||
}],
|
||||
'produces': [],
|
||||
'responses': {'202': {'examples': {},
|
||||
'headers': {},
|
||||
'schema': {
|
||||
'$ref':
|
||||
'#/definitions/copyThing_202'},
|
||||
'description': ''}},
|
||||
'summary': 'Copy a port interface.'}]})
|
||||
|
||||
self.assertEqual(
|
||||
ch.schemas,
|
||||
{'copyThing':
|
||||
{'required': ['copy-request'],
|
||||
'properties':
|
||||
{'copy-request':
|
||||
{'description':
|
||||
'Copy the ``interfaceAttachment``.',
|
||||
'format': '',
|
||||
'type': 'string'}},
|
||||
'type': 'object'},
|
||||
'copyThing_202':
|
||||
{'required': ['copy-response'],
|
||||
'properties':
|
||||
{'copy-response':
|
||||
{'description':
|
||||
'Copy response text.',
|
||||
'format': '',
|
||||
'type': 'string'}},
|
||||
'type': 'object'}}
|
||||
)
|
||||
|
||||
def test_wadl_multiple_actions(self):
|
||||
filename = "api-v1.wadl"
|
||||
api_ref = {
|
||||
"file_tags": {filename: 'things'},
|
||||
"method_tags": {
|
||||
},
|
||||
"resource_tags": {
|
||||
},
|
||||
"service": "lorem",
|
||||
"tags": [],
|
||||
"title": "Lorem Ipsum",
|
||||
"version": "v1"
|
||||
}
|
||||
|
||||
file_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<application>
|
||||
<resources xml:id="os-server-actions-v2">
|
||||
<resource id="server_id" type="#VersionDetails" path="/v2">
|
||||
<method href="#doThing1" />
|
||||
<method href="#doThing2" />
|
||||
</resource>
|
||||
</resources>
|
||||
<method name="POST" id="doThing1">
|
||||
<wadl:doc title="Create interface">
|
||||
<para role="shortdesc">Create a port interface.</para>
|
||||
</wadl:doc>
|
||||
<request>
|
||||
<representation mediaType="application/json">
|
||||
<param name="copy-request" style="plain"
|
||||
type="xsd:string" required="true">
|
||||
<wadl:doc>
|
||||
<para>
|
||||
Copy the <code>interfaceAttachment</code>.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
</param>
|
||||
</representation>
|
||||
</request>
|
||||
<response status="202">
|
||||
<representation mediaType="application/json">
|
||||
<param name="copy-response" style="plain"
|
||||
type="xsd:string" required="true">
|
||||
<wadl:doc>
|
||||
<para>
|
||||
Copy response text.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
</param>
|
||||
</representation>
|
||||
</response>
|
||||
</method>
|
||||
<method name="POST" id="doThing2">
|
||||
<wadl:doc title="Create interface">
|
||||
<para role="shortdesc">Create a port interface.</para>
|
||||
</wadl:doc>
|
||||
<request>
|
||||
<representation mediaType="application/json">
|
||||
<param name="copy-request" style="plain"
|
||||
type="xsd:string" required="true">
|
||||
<wadl:doc>
|
||||
<para>
|
||||
Copy the <code>interfaceAttachment</code>.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
</param>
|
||||
</representation>
|
||||
</request>
|
||||
<response status="202">
|
||||
<representation mediaType="application/json">
|
||||
<param name="copy-response" style="plain"
|
||||
type="xsd:string" required="true">
|
||||
<wadl:doc>
|
||||
<para>
|
||||
Copy response text.
|
||||
</para>
|
||||
</wadl:doc>
|
||||
</param>
|
||||
</representation>
|
||||
</response>
|
||||
</method>
|
||||
</application>
|
||||
"""
|
||||
|
||||
ch = wadl_to_swagger_valid.WADLHandler(filename, api_ref)
|
||||
xml.sax.parse(StringIO(file_content), ch)
|
||||
|
||||
# api structure differs from final swagger file output
|
||||
self.assertEqual(
|
||||
ch.apis,
|
||||
{'/v2':
|
||||
[{'method': 'post',
|
||||
'consumes': [],
|
||||
'description': '',
|
||||
'tags': ['things'],
|
||||
'x-title': 'Create interface',
|
||||
'operationId': 'doThing1',
|
||||
'parameters': [
|
||||
{
|
||||
'description': '',
|
||||
'in': 'body',
|
||||
'name': 'body',
|
||||
'required': False,
|
||||
'schema': {'$ref': '#/definitions/doThing1'}
|
||||
}],
|
||||
'produces': [],
|
||||
'responses': {'202': {'examples': {},
|
||||
'headers': {},
|
||||
'schema': {
|
||||
'$ref':
|
||||
'#/definitions/doThing1_202'},
|
||||
'description': ''}},
|
||||
'summary': 'Create a port interface.'},
|
||||
{'method': 'post',
|
||||
'consumes': [],
|
||||
'description': '',
|
||||
'tags': ['things'],
|
||||
'x-title': 'Create interface',
|
||||
'operationId': 'doThing2',
|
||||
'parameters': [
|
||||
{
|
||||
'description': '',
|
||||
'in': 'body',
|
||||
'name': 'body',
|
||||
'required': False,
|
||||
'schema': {'$ref': '#/definitions/doThing2'}
|
||||
}],
|
||||
'produces': [],
|
||||
'responses': {'202': {'examples': {},
|
||||
'headers': {},
|
||||
'schema': {
|
||||
'$ref':
|
||||
'#/definitions/doThing2_202'},
|
||||
'description': ''}},
|
||||
'summary': 'Create a port interface.'}]})
|
||||
|
||||
self.assertEqual(
|
||||
ch.schemas,
|
||||
{'doThing1':
|
||||
{'required': ['copy-request'],
|
||||
'properties':
|
||||
{'copy-request':
|
||||
{'description':
|
||||
'Copy the ``interfaceAttachment``.',
|
||||
'format': '',
|
||||
'type': 'string'}},
|
||||
'type': 'object'},
|
||||
'doThing1_202':
|
||||
{'required': ['copy-response'],
|
||||
'properties':
|
||||
{'copy-response':
|
||||
{'description':
|
||||
'Copy response text.',
|
||||
'format': '',
|
||||
'type': 'string'}},
|
||||
'type': 'object'},
|
||||
'doThing2':
|
||||
{'required': ['copy-request'],
|
||||
'properties':
|
||||
{'copy-request':
|
||||
{'description':
|
||||
'Copy the ``interfaceAttachment``.',
|
||||
'format': '',
|
||||
'type': 'string'}},
|
||||
'type': 'object'},
|
||||
'doThing2_202':
|
||||
{'required': ['copy-response'],
|
||||
'properties':
|
||||
{'copy-response':
|
||||
{'description':
|
||||
'Copy response text.',
|
||||
'format': '',
|
||||
'type': 'string'}},
|
||||
'type': 'object'}}
|
||||
)
|
|
@ -1,74 +0,0 @@
|
|||
// validate_test.js
|
||||
// Validation of generated Swagger files
|
||||
// run 'mocha' in mocha dir
|
||||
|
||||
'use strict';
|
||||
var v = require('json-schema-remote');
|
||||
var fs = require('fs');
|
||||
|
||||
var converted_files = ["blockstorage-v1-swagger.json",
|
||||
"blockstorage-v2-swagger.json",
|
||||
"compute-v2.1-swagger.json",
|
||||
"data-processing-v1.1-swagger.json",
|
||||
"database-v1-swagger.json",
|
||||
"identity-admin-v2-swagger.json",
|
||||
"identity-extensions-v2-swagger.json",
|
||||
"identity-v2-swagger.json",
|
||||
"identity-v3-swagger.json",
|
||||
"image-v1-swagger.json",
|
||||
"image-v2-swagger.json",
|
||||
"networking-extensions-v2-swagger.json",
|
||||
"networking-v2-swagger.json",
|
||||
"objectstorage-v1-swagger.json"
|
||||
];
|
||||
|
||||
var i = 0;
|
||||
|
||||
function validate_file(filename, done) {
|
||||
var swagger_file = fs.readFileSync('../../../conversion_files_valid/'.concat(filename), 'utf8');
|
||||
var myswagger = JSON.parse(swagger_file);
|
||||
|
||||
v.validate(myswagger, 'http://swagger.io/v2/schema.json', function(err, isValid) {
|
||||
if (err) {
|
||||
done(err);
|
||||
console.error('Error occurred', err);
|
||||
}
|
||||
if (isValid) {
|
||||
console.log(filename.concat(" is valid."));
|
||||
done();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// mocha test suite and tests
|
||||
describe('Validate generated swagger:', function() {
|
||||
// identity and compute take some time to validate
|
||||
this.timeout(12000);
|
||||
var filen = '';
|
||||
|
||||
before(function() {
|
||||
// TODO, check whether preload call is actually pre-loading the schema
|
||||
v.preload({
|
||||
$schema:'http://swagger.io/v2/schema.json',
|
||||
id:'',
|
||||
type:''
|
||||
});
|
||||
});
|
||||
|
||||
beforeEach(function() {
|
||||
filen = converted_files[i];
|
||||
console.log(filen);
|
||||
});
|
||||
|
||||
afterEach(function() {
|
||||
i += 1;
|
||||
});
|
||||
|
||||
var j = 0;
|
||||
for (j = i; j < converted_files.length; j++) {
|
||||
// Validate each file
|
||||
it(filen, function(done) {
|
||||
validate_file(filen, done);
|
||||
});
|
||||
}
|
||||
});
|
|
@ -1,33 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
test_fairy_slipper
|
||||
----------------------------------
|
||||
|
||||
Tests for `fairy_slipper` module.
|
||||
"""
|
||||
|
||||
from fairy_slipper.tests import base
|
||||
|
||||
|
||||
class TestFairySlipper(base.TestCase):
|
||||
|
||||
def test_something(self):
|
||||
pass
|
|
@ -1,88 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from fairy_slipper.tests import FunctionalTest
|
||||
|
||||
|
||||
class TestRootControllerNegative(FunctionalTest):
|
||||
|
||||
def setUp(self):
|
||||
self.CONFIG['app']['api_doc'] = \
|
||||
'%(confdir)s/fairy_slipper/tests/api_doc_missing'
|
||||
super(TestRootControllerNegative, self).setUp()
|
||||
|
||||
def test_get_doc_index(self):
|
||||
response = self.app.get('/doc/')
|
||||
assert response.json == []
|
||||
assert response.status_int == 200
|
||||
|
||||
|
||||
class TestRootController(FunctionalTest):
|
||||
|
||||
def test_get(self):
|
||||
response = self.app.get('/')
|
||||
assert response.status_int == 200
|
||||
|
||||
def test_search(self):
|
||||
response = self.app.post('/', params={'q': 'RestController'})
|
||||
assert response.status_int == 200
|
||||
|
||||
def test_get_not_found(self):
|
||||
response = self.app.get('/a/bogus/url', expect_errors=True)
|
||||
assert response.status_int == 404
|
||||
|
||||
def test_get_doc_index(self):
|
||||
response = self.app.get('/doc/')
|
||||
assert response.json == [
|
||||
{'url': 'identity/v2/',
|
||||
'version': 'v2',
|
||||
'license': {
|
||||
'url': 'http://www.apache.org/licenses/LICENSE-2.0.html',
|
||||
'name': 'Apache 2.0'},
|
||||
'service': 'identity',
|
||||
'title': 'Identity'}]
|
||||
assert response.status_int == 200
|
||||
|
||||
def test_get_doc_identity(self):
|
||||
response = self.app.get('/doc/identity/', expect_errors=True)
|
||||
assert response.status_int == 404
|
||||
|
||||
def test_get_doc_identity_v2(self):
|
||||
response = self.app.get('/doc/identity/v2/')
|
||||
assert response.json == \
|
||||
{'info':
|
||||
{'url': 'identity/v2/',
|
||||
'version': 'v2',
|
||||
'license': {
|
||||
'url': 'http://www.apache.org/licenses/LICENSE-2.0.html',
|
||||
'name': 'Apache 2.0'},
|
||||
'service': 'identity',
|
||||
'title': 'Identity'},
|
||||
'paths': {'/': [{'responses': {},
|
||||
'parameters': [],
|
||||
'produces': [],
|
||||
'consumes': [],
|
||||
'tags': ['simple'],
|
||||
'summary': '',
|
||||
'title': 'Simple route',
|
||||
'method': 'get',
|
||||
'description': ''}]},
|
||||
'tags': [{'description': '',
|
||||
'name': 'simple',
|
||||
'summary': 'Simple Tag'}]}
|
||||
assert response.status_int == 200
|
|
@ -1,771 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import unittest
|
||||
|
||||
import docutils.core
|
||||
|
||||
from fairy_slipper import rest
|
||||
from fairy_slipper.rest import JSONWriter
|
||||
|
||||
|
||||
def minimal_method_json(consumes=[],
|
||||
description='',
|
||||
method='get',
|
||||
parameters=[],
|
||||
produces=[],
|
||||
responses={},
|
||||
summary='',
|
||||
tags=[],
|
||||
title=''):
|
||||
return dict(consumes=consumes,
|
||||
description=description,
|
||||
method=method,
|
||||
parameters=parameters,
|
||||
produces=produces,
|
||||
responses=responses,
|
||||
summary=summary,
|
||||
tags=tags,
|
||||
title=title)
|
||||
|
||||
|
||||
class TestReSTMethod(unittest.TestCase):
|
||||
|
||||
def test_no_path(self):
|
||||
rst = """
|
||||
.. http:get::
|
||||
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths': {}, 'tags': []}
|
||||
|
||||
def test_path_with_body(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
body
|
||||
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path': [
|
||||
minimal_method_json(description='body\n\n')]},
|
||||
'tags': []}
|
||||
|
||||
def test_minimal(self):
|
||||
rst = """
|
||||
.. http:%s:: /path
|
||||
|
||||
"""
|
||||
for method in ['get', 'post', 'put', 'patch',
|
||||
'options', 'head', 'delete', 'copy']:
|
||||
json = docutils.core.publish_string(
|
||||
rst % method, writer=JSONWriter())
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path': [
|
||||
minimal_method_json(method=method)]},
|
||||
'tags': []}
|
||||
|
||||
def test_body_literal(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
literal block::
|
||||
|
||||
banana
|
||||
1
|
||||
2
|
||||
3
|
||||
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description='''literal block:
|
||||
|
||||
```
|
||||
banana
|
||||
1
|
||||
2
|
||||
3
|
||||
```
|
||||
''')]},
|
||||
'tags': []}
|
||||
|
||||
def body_code_block(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
Some text before code-block
|
||||
|
||||
.. code-block::json
|
||||
|
||||
the first item
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''Some text before code-block\n``` the first item```
|
||||
'''
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_body_ul(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
Some normal body text
|
||||
|
||||
- the first item
|
||||
- the second item
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''Some normal body text
|
||||
|
||||
|
||||
* the first item
|
||||
|
||||
* the second item
|
||||
|
||||
'''
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_body_ul_with_literal(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
Some normal body text
|
||||
|
||||
- the first item
|
||||
- the second item
|
||||
|
||||
A new paragraph under second item
|
||||
|
||||
- Create object:
|
||||
|
||||
``curl -i $publicURL/janeausten/helloworld.txt -X PUT -H
|
||||
"Content-Length: 1" -H "Content-Type: text/html; charset=UTF-8"
|
||||
-H "X-Auth-Token: $token"``
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''Some normal body text
|
||||
|
||||
|
||||
* the first item
|
||||
|
||||
* the second item
|
||||
|
||||
A new paragraph under second item
|
||||
|
||||
* Create object:
|
||||
|
||||
`curl -i $publicURL/janeausten/helloworld.txt -X PUT -H
|
||||
"Content-Length: 1" -H "Content-Type: text/html; charset=UTF-8"
|
||||
-H "X-Auth-Token: $token"`
|
||||
|
||||
'''
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_body_ul_with_literal_block(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
Some normal body text
|
||||
|
||||
- the first item
|
||||
- the second item
|
||||
- Create object:
|
||||
|
||||
|
||||
::
|
||||
|
||||
HTTP/1.1 201 Created
|
||||
Last-Modified: Fri, 17 Jan 2014 17:28:35 GMT
|
||||
Content-Length: 116
|
||||
X-Trans-Id: tx4d5e4f06d357462bb732f-0052d96843
|
||||
Date: Fri, 17 Jan 2014 17:28:35 GMT
|
||||
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''Some normal body text
|
||||
|
||||
|
||||
* the first item
|
||||
|
||||
* the second item
|
||||
|
||||
* Create object:
|
||||
|
||||
HTTP/1.1 201 Created
|
||||
Last-Modified: Fri, 17 Jan 2014 17:28:35 GMT
|
||||
Content-Length: 116
|
||||
X-Trans-Id: tx4d5e4f06d357462bb732f-0052d96843
|
||||
Date: Fri, 17 Jan 2014 17:28:35 GMT
|
||||
|
||||
'''
|
||||
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_body_ul_with_ul(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
Some normal body text
|
||||
|
||||
- the first item
|
||||
- the second item
|
||||
|
||||
A new paragraph under second item
|
||||
|
||||
- item under second item:
|
||||
|
||||
``curl -i $publicURL/janeausten/helloworld.txt``
|
||||
|
||||
And some more text
|
||||
|
||||
- the third item
|
||||
|
||||
Some normal body text again
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''Some normal body text
|
||||
|
||||
|
||||
* the first item
|
||||
|
||||
* the second item
|
||||
|
||||
A new paragraph under second item
|
||||
|
||||
* item under second item:
|
||||
|
||||
`curl -i $publicURL/janeausten/helloworld.txt`
|
||||
|
||||
And some more text
|
||||
|
||||
* the third item
|
||||
|
||||
Some normal body text again
|
||||
|
||||
'''
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_body_strong(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
start text **end**
|
||||
|
||||
**start** end
|
||||
|
||||
start **inline text** end
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''start text **end**
|
||||
|
||||
**start** end
|
||||
|
||||
start **inline text** end
|
||||
|
||||
'''
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_body_inline_literal(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
text ``end inline``
|
||||
|
||||
``start inline`` ending normal
|
||||
|
||||
start text ``inline inline`` end text
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''text `end inline`
|
||||
|
||||
`start inline` ending normal
|
||||
|
||||
start text `inline inline` end text
|
||||
|
||||
'''
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_body_inline_emphasis(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
text *end inline*
|
||||
|
||||
*start inline* ending normal
|
||||
|
||||
start text *inline inline* end text
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''text _end inline_
|
||||
|
||||
_start inline_ ending normal
|
||||
|
||||
start text _inline inline_ end text
|
||||
|
||||
'''
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_synopsis(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
:synopsis: Some description of the operation
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(
|
||||
summary='Some description of the operation')]},
|
||||
'tags': []}
|
||||
|
||||
def test_title(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
:title: Path Thing
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(title='Path Thing')]},
|
||||
'tags': []}
|
||||
|
||||
def test_hyperlink(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
For more information about form POST see `Object
|
||||
Storage API v1 (SUPPORTED) <http://docs.openstack.org/api
|
||||
/openstack-object-storage/1.0/content/>`_.
|
||||
|
||||
Example requests and responses:
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''For more information about form POST see [Object\nStorage API v1 (SUPPORTED)](http://docs.openstack.org/api/openstack-object-storage/1.0/content/).
|
||||
|
||||
Example requests and responses:
|
||||
|
||||
''' # noqa
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_table_simple(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
Some text before table
|
||||
|
||||
|
||||
+---------+---------+--------------+----------+
|
||||
| Field 1 | Field 2 | Field 3 | Field 4 |
|
||||
+---------+---------+--------------+----------+
|
||||
| Apply | Name | Description | Required |
|
||||
+---------+---------+--------------+----------+
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''Some text before table
|
||||
|
||||
| Field 1 | Field 2 | Field 3 | Field 4 |
|
||||
| --- | --- | --- | --- |
|
||||
| Apply | Name | Description | Required |
|
||||
|
||||
|
||||
'''
|
||||
|
||||
json = rest.publish_string(rst)
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_table_inline_strong(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
Some text before table
|
||||
|
||||
|
||||
+---------+---------------+-----------------+------------------+
|
||||
| Field 1 | Field 2 | Field 3 | Field 4 |
|
||||
+---------+---------------+-----------------+------------------+
|
||||
| Apply | text in | | |
|
||||
| | **between** | | |
|
||||
| | text | **text** start | text **end** |
|
||||
+---------+---------------+-----------------+------------------+
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''Some text before table
|
||||
|
||||
| Field 1 | Field 2 | Field 3 | Field 4 |
|
||||
| --- | --- | --- | --- |
|
||||
| Apply | text in<br>**between**<br>text | **text** start | text **end** |
|
||||
|
||||
|
||||
'''
|
||||
|
||||
json = rest.publish_string(rst)
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_table_inline_emphasis(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
Some text before table
|
||||
|
||||
|
||||
+---------+---------------+-----------------+------------------+
|
||||
| Field 1 | Field 2 | Field 3 | Field 4 |
|
||||
+---------+---------------+-----------------+------------------+
|
||||
| Apply | text in | | |
|
||||
| | *between* | | |
|
||||
| | text | *text* start | text *end* |
|
||||
+---------+---------------+-----------------+------------------+
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''Some text before table
|
||||
|
||||
| Field 1 | Field 2 | Field 3 | Field 4 |
|
||||
| --- | --- | --- | --- |
|
||||
| Apply | text in<br>_between_<br>text | _text_ start | text _end_ |
|
||||
|
||||
|
||||
'''
|
||||
json = rest.publish_string(rst)
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_table_multiline_col_entry(self):
|
||||
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
Image status
|
||||
|
||||
+----------------+---------------------------------------------------------------------+
|
||||
| Status | Description |
|
||||
+----------------+---------------------------------------------------------------------+
|
||||
| queued | The Image service reserved an image ID for the image in the |
|
||||
| | registry but has not uploaded any image data. |
|
||||
+----------------+---------------------------------------------------------------------+
|
||||
| saving | The Image service is currently uploading the raw data for the |
|
||||
| | image. |
|
||||
+----------------+---------------------------------------------------------------------+
|
||||
|
||||
""" # noqa
|
||||
markdown = '''Image status
|
||||
|
||||
| Status | Description |
|
||||
| --- | --- |
|
||||
| queued | The Image service reserved an image ID for the image in the<br>registry but has not uploaded any image data. |
|
||||
| saving | The Image service is currently uploading the raw data for the<br>image. |
|
||||
|
||||
|
||||
''' # noqa
|
||||
|
||||
json = rest.publish_string(rst)
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_table_inline_literal(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
Some text before table
|
||||
|
||||
|
||||
+----------------+----------+--------------+----------------+
|
||||
| Field 1 | Field 2 | Field 3 | Field 4 |
|
||||
+----------------+----------+--------------+----------------+
|
||||
| End ``text`` | ``Name`` | Description | ``start`` text |
|
||||
+----------------+----------+--------------+----------------+
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''Some text before table
|
||||
|
||||
| Field 1 | Field 2 | Field 3 | Field 4 |
|
||||
| --- | --- | --- | --- |
|
||||
| End `text` | `Name` | Description | `start` text |
|
||||
|
||||
|
||||
'''
|
||||
|
||||
json = rest.publish_string(rst)
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_table_with_list(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
Some text before table
|
||||
|
||||
+------------------------+------------------------------------------+
|
||||
| Response code | Description |
|
||||
+------------------------+------------------------------------------+
|
||||
| ``Bad Request (400)`` | The Identity service failed to parse the |
|
||||
| | following errors occurred: |
|
||||
| | |
|
||||
| | - A required attribute was missing. |
|
||||
| | |
|
||||
| | - An attribute that is not allowed was a |
|
||||
| | POST request in a basic CRUD op. |
|
||||
| | |
|
||||
| | - An ``attribute`` of an unexpected data |
|
||||
+------------------------+------------------------------------------+
|
||||
| ``Forbidden (403)`` | The identity was successfully authent. |
|
||||
| | authorized to perform the action. |
|
||||
+------------------------+------------------------------------------+
|
||||
|
||||
"""
|
||||
|
||||
markdown = '''Some text before table
|
||||
|
||||
| Response code | Description |
|
||||
| --- | --- |
|
||||
| `Bad Request (400)` | The Identity service failed to parse the<br>following \
|
||||
errors occurred:<ul><li>A required attribute was missing.</li><li>An\
|
||||
attribute that is not allowed was a<br>POST request in a basic CRUD op.\
|
||||
</li><li>An `attribute` of an unexpected data</li></ul> |
|
||||
| `Forbidden (403)` | The identity was successfully authent.<br>authorized\
|
||||
to perform the action. |
|
||||
|
||||
|
||||
'''
|
||||
|
||||
json = rest.publish_string(rst)
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(description=markdown)]},
|
||||
'tags': []}
|
||||
|
||||
def test_method_tags(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
:tag: cool-tag
|
||||
:tag: cool-tag1
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(
|
||||
tags=['cool-tag', 'cool-tag1'])]},
|
||||
'tags': []}
|
||||
|
||||
def test_produces(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
:produces: application/json
|
||||
:produces: text/plain
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(
|
||||
produces=['application/json',
|
||||
'text/plain'])]},
|
||||
'tags': []}
|
||||
|
||||
def test_accepts(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
:accepts: application/json
|
||||
:accepts: text/plain
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(
|
||||
consumes=['application/json',
|
||||
'text/plain'])]},
|
||||
'tags': []}
|
||||
|
||||
def test_parameter(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
:parameter thing: A parameter something.
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(
|
||||
parameters=[{
|
||||
'description': u'A parameter something.',
|
||||
'in': 'path',
|
||||
'name': u'thing',
|
||||
'required': True,
|
||||
'type': 'string'}])]},
|
||||
'tags': []}
|
||||
|
||||
def test_response_example(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
:responseexample 200: example.json
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(
|
||||
responses={'200':
|
||||
{'description': '',
|
||||
'examples':
|
||||
{'application/json':
|
||||
{'$ref':
|
||||
'example.json'}}}})]},
|
||||
'tags': []}
|
||||
|
||||
def test_response_schema(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
:responseschema 200: schema_200.json
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(
|
||||
responses={'200':
|
||||
{'description': '',
|
||||
'schema':
|
||||
{'$ref':
|
||||
'schema_200.json'}}})]},
|
||||
'tags': []}
|
||||
|
||||
def test_statuscode(self):
|
||||
rst = """
|
||||
.. http:get:: /path
|
||||
|
||||
:statuscode 200: Success! Yeah!
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
|
||||
assert json == {'paths':
|
||||
{'/path':
|
||||
[minimal_method_json(
|
||||
responses={'200':
|
||||
{'description': 'Success! Yeah!'}})]},
|
||||
'tags': []}
|
||||
|
||||
|
||||
class TestReSTTag(unittest.TestCase):
|
||||
|
||||
def test_synopsis(self):
|
||||
rst = """
|
||||
.. swagger:tag:: my-tag
|
||||
:synopsis: Interesting things!
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
assert json == {'paths': {},
|
||||
'tags': [{'name': 'my-tag',
|
||||
'description': '',
|
||||
'summary': 'Interesting things!'}]}
|
||||
|
||||
def test_description(self):
|
||||
rst = """
|
||||
.. swagger:tag:: my-tag
|
||||
|
||||
body
|
||||
"""
|
||||
json = rest.publish_string(rst)
|
||||
assert json == {'paths': {},
|
||||
'tags': [{'name': 'my-tag',
|
||||
'description': 'body\n\n',
|
||||
'summary': ''}]}
|
|
@ -1,25 +0,0 @@
|
|||
# Copyright (c) 2015 Russell Sim <russell.sim@gmail.com>
|
||||
#
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
|
||||
class TestUnits(TestCase):
|
||||
|
||||
def test_units(self):
|
||||
assert 5 * 5 == 25
|
|
@ -1,17 +0,0 @@
|
|||
# Copyright 2015 OpenStack Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import pbr.version
|
||||
|
||||
version_info = pbr.version.VersionInfo('fairy_slipper')
|
|
@ -1,64 +0,0 @@
|
|||
module.exports = function(config){
|
||||
var configuration = {
|
||||
|
||||
basePath : './public/',
|
||||
|
||||
files : [
|
||||
"components/angular/angular.js",
|
||||
"components/angular-mocks/angular-mocks.js",
|
||||
"components/angular-route/angular-route.js",
|
||||
"components/angular-resource/angular-resource.js",
|
||||
"components/angular-animate/angular-animate.js",
|
||||
"components/angular-bootstrap/ui-bootstrap-tpls.js",
|
||||
"components/angular-snap/angular-snap.js",
|
||||
"components/snapjs/snap.js",
|
||||
"components/angular-marked/angular-marked.js",
|
||||
"components/marked/marked.min.js",
|
||||
"components/highlightjs/highlight.pack.js",
|
||||
"components/angular-highlightjs/angular-highlightjs.js",
|
||||
"components/dotjem-angular-tree/dotjem-angular-tree.js",
|
||||
'app.js',
|
||||
'browser/*.js',
|
||||
'browser/**/*.js',
|
||||
'browser/*.html'
|
||||
],
|
||||
|
||||
autoWatch : true,
|
||||
|
||||
preprocessors: {
|
||||
'browser/*.html': ['ng-html2js']
|
||||
},
|
||||
|
||||
frameworks: ['jasmine'],
|
||||
|
||||
browsers : ['Chrome'],
|
||||
|
||||
plugins : [
|
||||
'karma-chrome-launcher',
|
||||
'karma-firefox-launcher',
|
||||
'karma-jasmine',
|
||||
'karma-ng-html2js-preprocessor',
|
||||
'karma-junit-reporter'
|
||||
],
|
||||
|
||||
junitReporter : {
|
||||
outputFile: 'test_out/unit.xml',
|
||||
suite: 'unit'
|
||||
},
|
||||
|
||||
captureTimeout: 120000,
|
||||
|
||||
customLaunchers: {
|
||||
Chrome_travis_ci: {
|
||||
base: 'Chrome',
|
||||
flags: ['--no-sandbox']
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if(process.env.TRAVIS){
|
||||
configuration.browsers = ['Chrome_travis_ci'];
|
||||
}
|
||||
|
||||
config.set(configuration);
|
||||
};
|
164
migrate.sh
164
migrate.sh
|
@ -1,164 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
function usage {
|
||||
echo "Usage: $0 [OPTION]..."
|
||||
echo "Migrate from the api-ref site into a from fairy-slipper"
|
||||
echo ""
|
||||
echo " -V, --virtual-env Always use virtualenv. Install automatically if not present"
|
||||
echo " -N, --no-virtual-env Don't use virtualenv. Run tests in local environment"
|
||||
echo " -n, --no-site-packages Isolate the virtualenv from the global Python environment"
|
||||
echo " -f, --force Force a clean re-build of the virtual environment. Useful when dependencies have been added."
|
||||
echo " -u, --update Update the virtual environment with any newer package versions"
|
||||
echo " -h, --help Print this usage message"
|
||||
echo " -d, --debug Run tests with testtools instead of testr. This allows you to use PDB"
|
||||
echo " --docs-only Only generate docs"
|
||||
echo " --verbose-docs Verbose logging of document generation"
|
||||
echo " --docbkx2json Only perform docbook to json conversion"
|
||||
echo " --wadl2swagger Only perform wadl to swagger-ish conversion"
|
||||
echo " --wadl2swaggervalid Only perform wadl to valid swagger conversion"
|
||||
echo " --swagger2rst Only perform swagger to rst conversion"
|
||||
echo " --swaggerandmd Only perform markdown update to swagger"
|
||||
echo " --create-yamls Create yaml parameter files"
|
||||
}
|
||||
|
||||
venv=.venv
|
||||
with_venv=tools/with_venv.sh
|
||||
always_venv=0
|
||||
never_venv=0
|
||||
no_site_packages=0
|
||||
debug=0
|
||||
force=0
|
||||
wrapper=""
|
||||
update=0
|
||||
docs_only=
|
||||
verbose_docs=""
|
||||
docbkx2json=
|
||||
wadl2swagger=
|
||||
wadl2swaggervalid=
|
||||
swagger2rst=
|
||||
swaggerandmd=
|
||||
create_yamls=
|
||||
|
||||
if ! options=$(getopt -o VNnfuhd -l virtual-env,no-virtual-env,no-site-packages,force,update,help,debug,docs-only,verbose-docs,docbkx2json,wadl2swagger,wadl2swaggervalid,swagger2rst,swaggerandmd,create-yamls -- "$@")
|
||||
then
|
||||
# parse error
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
eval set -- "$options"
|
||||
while [ $# -gt 0 ]; do
|
||||
case "$1" in
|
||||
-h|--help) usage; exit;;
|
||||
-V|--virtual-env) always_venv=1; never_venv=0;;
|
||||
-N|--no-virtual-env) always_venv=0; never_venv=1;;
|
||||
-n|--no-site-packages) no_site_packages=1;;
|
||||
-f|--force) force=1;;
|
||||
-u|--update) update=1;;
|
||||
-d|--debug) debug=1;;
|
||||
--docs-only) docs_only=1;;
|
||||
--verbose-docs) verbose_docs="-v";;
|
||||
--docbkx2json) docbkx2json=1;;
|
||||
--wadl2swagger) wadl2swagger=1;;
|
||||
--wadl2swaggervalid) wadl2swaggervalid=1;;
|
||||
--swagger2rst) swagger2rst=1;;
|
||||
--swaggerandmd) swaggerandmd=1;;
|
||||
--create-yamls) create_yamls=1;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
cd `dirname "$0"`
|
||||
|
||||
if [ $no_site_packages -eq 1 ]; then
|
||||
installvenvopts="--no-site-packages"
|
||||
fi
|
||||
|
||||
if [ $never_venv -eq 0 ]
|
||||
then
|
||||
# Remove the virtual environment if --force used
|
||||
if [ $force -eq 1 ]; then
|
||||
echo "Cleaning virtualenv..."
|
||||
rm -rf ${venv}
|
||||
fi
|
||||
if [ $update -eq 1 ]; then
|
||||
echo "Updating virtualenv..."
|
||||
python tools/install_venv.py $installvenvopts
|
||||
fi
|
||||
if [ -e ${venv} ]; then
|
||||
wrapper="${with_venv}"
|
||||
else
|
||||
if [ $always_venv -eq 1 ]; then
|
||||
# Automatically install the virtualenv
|
||||
python tools/install_venv.py $installvenvopts
|
||||
wrapper="${with_venv}"
|
||||
else
|
||||
echo -e "No virtual environment found...create one? (Y/n) \c"
|
||||
read use_ve
|
||||
if [ "x$use_ve" = "xY" -o "x$use_ve" = "x" -o "x$use_ve" = "xy" ]; then
|
||||
# Install the virtualenv and run the test suite in it
|
||||
python tools/install_venv.py $installvenvopts
|
||||
wrapper=${with_venv}
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
function install_fairy_slipper {
|
||||
${wrapper} pip install -e .
|
||||
}
|
||||
|
||||
function install_api_site {
|
||||
if [ ! -d api-site ]; then
|
||||
git clone https://github.com/openstack/api-site.git
|
||||
fi
|
||||
}
|
||||
|
||||
function migrate_docbkx {
|
||||
if [ ! -d conversion_files ]; then
|
||||
mkdir conversion_files
|
||||
fi
|
||||
if [ ! -d conversion_files_valid ]; then
|
||||
mkdir conversion_files_valid
|
||||
fi
|
||||
if [ ! -d api_doc ]; then
|
||||
mkdir api_doc
|
||||
fi
|
||||
|
||||
generate_all=
|
||||
if [[ -z $docbkx2json && -z $wadl2swagger && -z $wadl2swaggervalid && -z $swagger2rst && -z $swaggerandmd && -z $create_yamls ]]; then
|
||||
generate_all=1
|
||||
fi
|
||||
|
||||
if [[ -n $docbkx2json || -n $generate_all ]]; then
|
||||
${wrapper} find api-site/api-ref/src/docbkx/ -name api-ref-\* -type f -exec fairy-slipper-docbkx-to-json -o conversion_files $verbose_docs {} \;
|
||||
fi
|
||||
|
||||
if [[ -n $wadl2swagger || -n $generate_all ]]; then
|
||||
${wrapper} find conversion_files -name api-ref\*json -type f -exec fairy-slipper-wadl-to-swagger -o conversion_files $verbose_docs {} \;
|
||||
fi
|
||||
|
||||
if [[ -n $wadl2swaggervalid || -n $generate_all ]]; then
|
||||
${wrapper} find conversion_files -name api-ref\*json -type f -exec fairy-slipper-wadl-to-swagger-valid -o conversion_files_valid $verbose_docs {} \;
|
||||
fi
|
||||
|
||||
if [[ -n $swagger2rst || -n $generate_all ]]; then
|
||||
${wrapper} find conversion_files_valid -name \*-swagger.json -type f -exec fairy-slipper-swagger-to-rst -o api_doc $verbose_docs {} \;
|
||||
fi
|
||||
|
||||
if [[ -n $swaggerandmd || -n $generate_all ]]; then
|
||||
${wrapper} find conversion_files_valid -name \*-swagger.json -type f -exec fairy-slipper-swagger-and-md -o conversion_files_valid $verbose_docs {} \;
|
||||
fi
|
||||
|
||||
# Clean out sub dirs under api_doc as create-yamls appends to file
|
||||
if [[ -n $create_yamls || -n $generate_all ]]; then
|
||||
${wrapper} find conversion_files -name \*-swagger.json -type f -exec fairy-slipper-create-yamls -o api_doc $verbose_docs {} \;
|
||||
fi
|
||||
}
|
||||
|
||||
if [ -z $docs_only ]; then
|
||||
install_fairy_slipper
|
||||
install_api_site
|
||||
fi
|
||||
|
||||
migrate_docbkx
|
|
@ -1,6 +0,0 @@
|
|||
[DEFAULT]
|
||||
|
||||
# The list of modules to copy from oslo-incubator.git
|
||||
|
||||
# The base module to hold the copy of openstack.common
|
||||
base=fairy_slipper
|
34
package.json
34
package.json
|
@ -1,34 +0,0 @@
|
|||
{
|
||||
"name": "fairy-slipper",
|
||||
"version": "0.0.0",
|
||||
"repository": {},
|
||||
"devDependencies": {
|
||||
"bower": "^1.3.1",
|
||||
"connect-livereload": "^0.5.3",
|
||||
"grunt": "^0.4.5",
|
||||
"grunt-connect-proxy": "^0.2.0",
|
||||
"grunt-contrib-connect": "^0.9.0",
|
||||
"grunt-contrib-watch": "^0.6.1",
|
||||
"grunt-reload": "^0.2.0",
|
||||
"jasmine-core": "^2.3.4",
|
||||
"karma": "~0.12",
|
||||
"karma-chrome-launcher": "^0.1.12",
|
||||
"karma-firefox-launcher": "^0.1.6",
|
||||
"karma-jasmine": "^0.3.5",
|
||||
"karma-junit-reporter": "^0.2.2",
|
||||
"karma-ng-html2js-preprocessor": "^0.1.2",
|
||||
"matchdep": "^0.3.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"postinstall": "bower install --config.analytics=false",
|
||||
"pretest": "npm install",
|
||||
"test": "karma start karma.conf.js",
|
||||
"test-single-run": "karma start karma.conf.js --single-run"
|
||||
},
|
||||
"dependencies": {
|
||||
"json-schema-remote": "^0.1.5"
|
||||
}
|
||||
}
|
134
public/app.css
134
public/app.css
|
@ -1,134 +0,0 @@
|
|||
body, h1, h2, h3, h4, h5, h6 {
|
||||
font-family: "Open Sans",Helvetica,Arial,sans-serif;
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
.snap-content {
|
||||
background: #fff;
|
||||
box-shadow: 0 0 20px #bccfde;
|
||||
margin-left: 30px;
|
||||
}
|
||||
|
||||
@media (min-width: 768px) {
|
||||
.snap-open{
|
||||
// I use 180 for my snap drawer width,
|
||||
// change this to your config
|
||||
right:180px !important;
|
||||
}
|
||||
}
|
||||
|
||||
.sidebar-nav > li > a {
|
||||
padding: 5px 15px;
|
||||
|
||||
}
|
||||
|
||||
.sidebar-nav > li > .sidebar-nav {
|
||||
margin-left: 10px;
|
||||
|
||||
}
|
||||
|
||||
dd {
|
||||
margin-left: 10px;
|
||||
}
|
||||
|
||||
|
||||
.accordion-toggle h3 {
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
a.accordion-toggle:focus, a.accordion-toggle:hover {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.operation-header h3{
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
.operation-header small{
|
||||
font-size: 65%;
|
||||
}
|
||||
|
||||
.operation-method {
|
||||
display: inline-block;
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
.operation-title {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.swagger-method {
|
||||
vertical-align: top;
|
||||
top: 0.5em;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.content {
|
||||
margin: 10px;
|
||||
}
|
||||
|
||||
.snap-draw {
|
||||
background: #edf2f7 none repeat scroll 0 0;
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
margin: 10px 10px 10px 30px;
|
||||
}
|
||||
|
||||
.sidebar .nav-title{
|
||||
text-transform: uppercase;
|
||||
margin: 5px;
|
||||
display: block;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.sidebar .btn-group {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.sidebar .btn-group .btn{
|
||||
width: 50%;
|
||||
}
|
||||
|
||||
.sidebar .li {
|
||||
border-bottom: 1px solid #c8d8e5;
|
||||
color: #2a4e68;
|
||||
}
|
||||
|
||||
.sidebar a {
|
||||
color: #2a4e68;
|
||||
}
|
||||
|
||||
.sidebar .selected > a, .sidebar .selected > a:hover, .sidebar .selected > a:focus{
|
||||
background: #496e8a;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.sidebar a:hover {
|
||||
background: #bccfde;
|
||||
color: #2a4e68;
|
||||
}
|
||||
|
||||
.dragger {
|
||||
background: #496e8a;
|
||||
color: #fff;
|
||||
font-size: 24px;
|
||||
height: 100%;
|
||||
width: 30px
|
||||
top: 0;
|
||||
align-content: center;
|
||||
position: fixed !important;
|
||||
}
|
||||
|
||||
|
||||
.dragger .glyphicon {
|
||||
float: left;
|
||||
margin: 0 auto;
|
||||
top: 50%;
|
||||
}
|
||||
|
||||
/* Disable all boarders and shadows on accordians */
|
||||
.panel {
|
||||
border: 0;
|
||||
box-shadow: 0 0 0 0;
|
||||
}
|
|
@ -1,35 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
angular.module('fairySlipper', [
|
||||
'ngRoute',
|
||||
'snap',
|
||||
'fairySlipper.browser',
|
||||
'fairySlipper.index'
|
||||
]).
|
||||
config(['$routeProvider', function($routeProvider) {
|
||||
$routeProvider.otherwise({redirectTo: '/'});
|
||||
}]);
|
||||
|
||||
|
||||
// Speed up calls to hasOwnProperty
|
||||
var hasOwnProperty = Object.prototype.hasOwnProperty;
|
||||
|
||||
function isEmpty(obj) {
|
||||
|
||||
// null and undefined are "empty"
|
||||
if (obj == null) return true;
|
||||
|
||||
// Assume if it has a length property with a non-zero value
|
||||
// that that property is correct.
|
||||
if (obj.length > 0) return false;
|
||||
if (obj.length === 0) return true;
|
||||
|
||||
// Otherwise, does it have any properties of its own?
|
||||
// Note that this doesn't handle
|
||||
// toString and valueOf enumeration bugs in IE < 9
|
||||
for (var key in obj) {
|
||||
if (hasOwnProperty.call(obj, key)) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
|
@ -1,209 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
angular.module('fairySlipper.browser', [
|
||||
'ngRoute',
|
||||
'ngResource',
|
||||
'ngAnimate',
|
||||
'hc.marked',
|
||||
'ui.bootstrap',
|
||||
'hljs',
|
||||
'dotjem.angular.tree'
|
||||
])
|
||||
.config(['$routeProvider', function($routeProvider) {
|
||||
$routeProvider.when('/by-path/:service/:version/', {
|
||||
templateUrl: 'browser/by-path.html',
|
||||
controller: 'ByPathCtrl'
|
||||
});
|
||||
}])
|
||||
|
||||
.config(['$routeProvider', function($routeProvider) {
|
||||
$routeProvider.when('/by-tag/:service/:version/', {
|
||||
templateUrl: 'browser/by-tag.html',
|
||||
controller: 'ByTagCtrl'
|
||||
});
|
||||
}])
|
||||
|
||||
.config(['markedProvider', function(markedProvider) {
|
||||
markedProvider.setRenderer({
|
||||
table: function(header, body) {
|
||||
return '<table class="table">\n'
|
||||
+ '<thead>\n'
|
||||
+ header
|
||||
+ '</thead>\n'
|
||||
+ '<tbody>\n'
|
||||
+ body
|
||||
+ '</tbody>\n'
|
||||
+ '</table>\n';
|
||||
}
|
||||
});
|
||||
}])
|
||||
|
||||
.factory('Service', ['$resource', function($resource) {
|
||||
return $resource('/doc/:service/:version/', {
|
||||
}, {
|
||||
}, {
|
||||
stripTrailingSlashes: false
|
||||
});
|
||||
}])
|
||||
|
||||
.directive('swaggerExample', ['$http', function($http) {
|
||||
function link(scope, element, attrs) {
|
||||
scope.language = 'json';
|
||||
|
||||
var mimes = [];
|
||||
angular.forEach(scope.source, function(value, key) {
|
||||
this.push(key);
|
||||
}, mimes);
|
||||
|
||||
if (!scope.mimetype) {
|
||||
scope.mimetype = mimes[0];
|
||||
}
|
||||
|
||||
var load = function(newValue, oldValue) {
|
||||
if (newValue && scope.source && ! scope.example) {
|
||||
$http.get('/doc/' + scope.swagger.info.service + '/' +
|
||||
scope.source[scope.mimetype].$ref +
|
||||
'/').success(function(data){
|
||||
// force conversion to a string. AngularJS is too
|
||||
// smart sometimes.
|
||||
if (typeof data != 'string') {
|
||||
scope.example = JSON.stringify(data, undefined, 2);
|
||||
} else {
|
||||
scope.example = data;
|
||||
}
|
||||
});
|
||||
}};
|
||||
|
||||
scope.$watch('triggerLoad', load);
|
||||
if (scope.triggerLoad) {
|
||||
load();
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
source: '=src',
|
||||
swagger: '=swagger',
|
||||
mimetype: '=mimetype',
|
||||
triggerLoad: '=triggerLoad'
|
||||
},
|
||||
link: link,
|
||||
templateUrl: 'browser/swagger-example.html'
|
||||
};
|
||||
}])
|
||||
|
||||
.directive('swaggerMethod', ['$http', function($http) {
|
||||
function link(scope, element, attrs) {
|
||||
var classes = {
|
||||
get: 'label-success',
|
||||
options: 'label-success',
|
||||
head: 'label-success',
|
||||
post: 'label-primary',
|
||||
put: 'label-warning',
|
||||
patch: 'label-warning',
|
||||
copy: 'label-warning',
|
||||
delete: 'label-danger'
|
||||
};
|
||||
|
||||
if (classes[scope.method]) {
|
||||
scope.label_class = classes[scope.method];
|
||||
} else {
|
||||
scope.label_class = 'label-default';
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
method: '=method'
|
||||
},
|
||||
link: link,
|
||||
templateUrl: 'browser/swagger-method.html'
|
||||
};
|
||||
}])
|
||||
|
||||
.directive('swaggerPath', ['$http', '$sce', function($http, $sce) {
|
||||
function link(scope, element, attrs) {
|
||||
var path = urldescription.parse(scope.path);
|
||||
scope.annotated_path = $sce.trustAsHtml(path.annotate(scope.parameters));
|
||||
}
|
||||
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
path: '=path',
|
||||
parameters: '=parameters'
|
||||
},
|
||||
link: link,
|
||||
template: '<div ng-bind-html="annotated_path"></div>'
|
||||
};
|
||||
}])
|
||||
|
||||
.directive('swaggerSchema', ['$http', function($http) {
|
||||
function link(scope, element, attrs) {
|
||||
if (scope.parameters && scope.parameters[0].schema.$ref) {
|
||||
$http.get('/doc/' + scope.swagger.info.service + '/' +
|
||||
scope.parameters[0].schema.$ref + '/')
|
||||
.success(function(data){
|
||||
scope.schema = data;
|
||||
});
|
||||
}
|
||||
}
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
swagger: '=swagger',
|
||||
parameters: '=parameters'
|
||||
},
|
||||
link: link,
|
||||
templateUrl: 'browser/swagger-schema.html'
|
||||
};
|
||||
}])
|
||||
|
||||
.controller('ParametersCtrl', ['$scope', function($scope) {
|
||||
$scope.parameters = {};
|
||||
angular.forEach($scope.operation.parameters, function (item) {
|
||||
if (! $scope.parameters[item.in]) {
|
||||
$scope.parameters[item.in] = [];
|
||||
}
|
||||
$scope.parameters[item.in].push(item);
|
||||
});
|
||||
}])
|
||||
|
||||
.controller('ByPathCtrl', ['$scope', '$http', '$routeParams', 'Service', function($scope, $http, $routeParams, Service) {
|
||||
$scope.isEmpty = isEmpty;
|
||||
Service.get({
|
||||
service: $routeParams.service,
|
||||
version: $routeParams.version
|
||||
}).$promise.then(function (data) {
|
||||
$scope.swagger = data;
|
||||
$scope.paths = Object.keys(data.paths).map(function (key) {
|
||||
var value = data.paths[key];
|
||||
return Object.defineProperty(value, '$key', { enumerable: false, value: key});
|
||||
});
|
||||
});
|
||||
}])
|
||||
|
||||
.controller('ByTagCtrl', ['$scope', '$http', '$routeParams', 'Service', function($scope, $http, $routeParams, Service) {
|
||||
$scope.isEmpty = isEmpty;
|
||||
Service.get({
|
||||
service: $routeParams.service,
|
||||
version: $routeParams.version
|
||||
}).$promise.then(function (data) {
|
||||
$scope.swagger = data;
|
||||
$scope.operations = {};
|
||||
Object.keys(data.paths).map(function (path) {
|
||||
var operations = data.paths[path];
|
||||
angular.forEach(operations, function (operation) {
|
||||
angular.forEach(operation.tags, function (tag) {
|
||||
operation['path'] = path;
|
||||
if (! $scope.operations[tag]) {
|
||||
$scope.operations[tag] = [];
|
||||
}
|
||||
$scope.operations[tag].push(operation);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}]);
|
|
@ -1,99 +0,0 @@
|
|||
<div class="page-header">
|
||||
<h1>
|
||||
{{ swagger.info.title }}
|
||||
</h1>
|
||||
</div>
|
||||
|
||||
<div ng-repeat="operations in paths | orderBy: '$key'">
|
||||
<h3>
|
||||
<swagger-path path="operations.$key"
|
||||
parameters="operations[0].parameters">
|
||||
</swagger-path>
|
||||
</h3>
|
||||
<accordion close-others="false">
|
||||
<accordion-group ng-repeat="operation in operations"
|
||||
is-open="operation_open">
|
||||
<accordion-heading>
|
||||
<div class="operation-header">
|
||||
<i class="pull-right glyphicon"
|
||||
ng-class="{'glyphicon-menu-down': operation_open, 'glyphicon-menu-right': !operation_open}">
|
||||
</i>
|
||||
<div class="operation-method">
|
||||
<swagger-method method="operation.method"></swagger-method>
|
||||
</div>
|
||||
<div class="operation-title">
|
||||
{{ operation.title }}
|
||||
<br/>
|
||||
<small>{{ operation.summary }}</small>
|
||||
</div>
|
||||
</div>
|
||||
</accordion-heading>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
</div>
|
||||
<div ng-controller="ParametersCtrl" class="col-lg-6">
|
||||
<tabset>
|
||||
<tab heading="Description">
|
||||
<div class="content" ng-if="!isEmpty(operation.description)"
|
||||
marked="operation.description">
|
||||
</div>
|
||||
<ng-include ng-if="isEmpty(operation.description)"
|
||||
src="'browser/undocumented-operation.html'">
|
||||
</ng-include>
|
||||
</tab>
|
||||
<tab heading="Details">
|
||||
<div ng-if="parameters.header">
|
||||
<h5>Headers</h5>
|
||||
<dl>
|
||||
<dt ng-repeat-start="parameter in parameters.header">{{parameter.name}}
|
||||
</dt>
|
||||
<dd ng-repeat-end><span marked="parameter.description"/></dd>
|
||||
</dl>
|
||||
</div>
|
||||
<div ng-if="parameters.query">
|
||||
<h5>URL Parameters</h5>
|
||||
<dl>
|
||||
<dt ng-repeat-start="parameter in parameters.query">{{parameter.name}}
|
||||
</dt>
|
||||
<dd ng-repeat-end><span marked="parameter.description"/></dd>
|
||||
</dl>
|
||||
</div>
|
||||
<div ng-if="parameters.body">
|
||||
<h5>Request Schema</h5>
|
||||
<swagger-schema swagger="swagger" parameters="parameters.body"></swagger-schema>
|
||||
</div>
|
||||
</tab>
|
||||
</tabset>
|
||||
</div>
|
||||
<div class="col-lg-6">
|
||||
<h4>Request</h4>
|
||||
<swagger-example ng-if="!isEmpty(operation.examples)"
|
||||
swagger="swagger"
|
||||
src="operation.examples"
|
||||
trigger-load="operation_open">
|
||||
</swagger-example>
|
||||
<div ng-if="isEmpty(operation.examples)" class="well text-muted">
|
||||
No request recorded.
|
||||
</div>
|
||||
<h4>Responses</h4>
|
||||
<accordion close-others="true">
|
||||
<accordion-group ng-repeat="(status_code, response) in operation.responses"
|
||||
is-disabled="isEmpty(response.examples)"
|
||||
is-open="status.open">
|
||||
<accordion-heading>
|
||||
{{ status_code }}: {{ response.description }}
|
||||
<i ng-if="!isEmpty(response.examples)"
|
||||
class="pull-right glyphicon"
|
||||
ng-class="{'glyphicon-menu-down': status.open, 'glyphicon-menu-right': !status.open}"></i>
|
||||
</accordion-heading>
|
||||
<swagger-example swagger="swagger"
|
||||
src="response.examples"
|
||||
trigger-load="status.open">
|
||||
</swagger-example>
|
||||
</accordion-group>
|
||||
</accordion>
|
||||
</div>
|
||||
</div>
|
||||
</accordion-group>
|
||||
</accordion>
|
||||
</div>
|
|
@ -1,103 +0,0 @@
|
|||
<div class="page-header">
|
||||
<h1>
|
||||
{{ swagger.info.title }}
|
||||
</h1>
|
||||
</div>
|
||||
|
||||
<div ng-repeat="tag in swagger.tags">
|
||||
<h3>{{ tag.summary }}</h3>
|
||||
<div marked="tag.description"></div>
|
||||
<accordion close-others="false">
|
||||
<accordion-group ng-repeat="operation in operations[tag.name]"
|
||||
is-open="operation_open">
|
||||
<accordion-heading>
|
||||
<div class="operation-header">
|
||||
<i class="pull-right glyphicon"
|
||||
ng-class="{'glyphicon-menu-down': operation_open, 'glyphicon-menu-right': !operation_open}">
|
||||
</i>
|
||||
<div class="operation-method">
|
||||
<swagger-method method="operation.method"></swagger-method>
|
||||
</div>
|
||||
<div class="operation-title">
|
||||
{{ operation.title }}
|
||||
<br/>
|
||||
<small>{{ operation.summary }}</small>
|
||||
</div>
|
||||
</div>
|
||||
</accordion-heading>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<h4>
|
||||
<swagger-path path="operation.path"
|
||||
parameters="operation.parameters">
|
||||
</swagger-path>
|
||||
</h4>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div ng-controller="ParametersCtrl" class="col-lg-6">
|
||||
<tabset>
|
||||
<tab heading="Description">
|
||||
<div class="content" ng-if="!isEmpty(operation.description)"
|
||||
marked="operation.description">
|
||||
</div>
|
||||
<ng-include ng-if="isEmpty(operation.description)"
|
||||
src="'browser/undocumented-operation.html'">
|
||||
</ng-include>
|
||||
</tab>
|
||||
<tab heading="Details">
|
||||
<div ng-if="parameters.header">
|
||||
<h5>Headers</h5>
|
||||
<dl>
|
||||
<dt ng-repeat-start="parameter in parameters.header">{{parameter.name}}
|
||||
</dt>
|
||||
<dd ng-repeat-end><span marked="parameter.description"/></dd>
|
||||
</dl>
|
||||
</div>
|
||||
<div ng-if="parameters.query">
|
||||
<h5>URL Parameters</h5>
|
||||
<dl>
|
||||
<dt ng-repeat-start="parameter in parameters.query">{{parameter.name}}
|
||||
</dt>
|
||||
<dd ng-repeat-end><span marked="parameter.description"/></dd>
|
||||
</dl>
|
||||
</div>
|
||||
<div ng-if="parameters.body">
|
||||
<h5>Request Schema</h5>
|
||||
<swagger-schema swagger="swagger" parameters="parameters.body"></swagger-schema>
|
||||
</div>
|
||||
</tab>
|
||||
</tabset>
|
||||
</div>
|
||||
<div class="col-lg-6">
|
||||
<h4>Request</h4>
|
||||
<swagger-example ng-if="!isEmpty(operation.examples)"
|
||||
swagger="swagger"
|
||||
src="operation.examples"
|
||||
trigger-load="operation_open">
|
||||
</swagger-example>
|
||||
<div ng-if="isEmpty(operation.examples)" class="well text-muted">
|
||||
No request recorded.
|
||||
</div>
|
||||
<h4>Responses</h4>
|
||||
<accordion close-others="true">
|
||||
<accordion-group ng-repeat="(status_code, response) in operation.responses"
|
||||
is-disabled="isEmpty(response.examples)"
|
||||
is-open="status.open">
|
||||
<accordion-heading>
|
||||
{{ status_code }}: {{ response.description }}
|
||||
<i ng-if="!isEmpty(response.examples)"
|
||||
class="pull-right glyphicon"
|
||||
ng-class="{'glyphicon-menu-down': status.open, 'glyphicon-menu-right': !status.open}"></i>
|
||||
</accordion-heading>
|
||||
<swagger-example swagger="swagger"
|
||||
src="response.examples"
|
||||
trigger-load="status.open">
|
||||
</swagger-example>
|
||||
</accordion-group>
|
||||
</accordion>
|
||||
</div>
|
||||
</div>
|
||||
</accordion-group>
|
||||
</accordion>
|
||||
</div>
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue