Initial import of the code

Change-Id: I1b8c391f841795318485e03c4f0222c5e19e422b
This commit is contained in:
Artem Goncharov
2024-10-04 18:18:09 +02:00
parent 8feb4b62ee
commit b8e9951c1a
48 changed files with 183565 additions and 0 deletions

160
.gitignore vendored Normal file
View File

@@ -0,0 +1,160 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

29
.pre-commit-config.yaml Normal file
View File

@@ -0,0 +1,29 @@
---
default_language_version:
# force all unspecified python hooks to run python3
python: python3
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: trailing-whitespace
- id: mixed-line-ending
args: ['--fix', 'lf']
exclude: '.*\.(svg)$'
- id: check-byte-order-marker
- id: check-executables-have-shebangs
- id: check-merge-conflict
- id: debug-statements
- id: check-yaml
files: .*\.(yaml|yml)$
exclude: '^zuul.d/.*$'
- repo: https://github.com/PyCQA/doc8
rev: v1.1.1
hooks:
- id: doc8
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.7
hooks:
- id: ruff
args: ['--fix', '--unsafe-fixes']
- id: ruff-format

201
LICENSE Normal file
View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

6
README.rst Normal file
View File

@@ -0,0 +1,6 @@
OpenStack OpenAPI specs
=======================
This project is currently (temporarily until it is all upstreamed) hosting OpenAPI specs of diverse OpenStack APIs and a Sphinx extension to render them into html in the OpenStack style
**This is all a work in progress**

7
doc/requirements.txt Normal file
View File

@@ -0,0 +1,7 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
pbr!=2.1.0,>=2.0.0
sphinx>=2.0.0,!=2.1.0 # BSD
# sphinxcontrib-openapi>=0.8 # BSD
openstackdocstheme>=2.2.0 # Apache-2.0

View File

@@ -0,0 +1,4 @@
Block Storage
=============
.. openapi:: ../../specs/block-storage/v3.yaml

5
doc/source/compute.rst Normal file
View File

@@ -0,0 +1,5 @@
Compute
=======
.. openapi:: ../../specs/compute/v2.yaml
:service_type: compute

20
doc/source/conf.py Normal file
View File

@@ -0,0 +1,20 @@
# Sphinx configuration for building the openstack-openapi documentation.
import os

# Unfortunately, Sphinx doesn't support code highlighting for standard
# reStructuredText `code` directive. So let's register 'code' directive
# as alias for Sphinx's own implementation.
#
# https://github.com/sphinx-doc/sphinx/issues/2155
from docutils.parsers.rst import directives
from sphinx.directives.code import CodeBlock

directives.register_directive("code", CodeBlock)

# Project metadata; "os_openapi" is the local extension rendering the specs.
project = "openstack-openapi"
extensions = ["openstackdocstheme", "os_openapi"]

# Source/build settings: reStructuredText input, index.rst as the root
# document, OpenStack docs theme for HTML output.
source_suffix = ".rst"
master_doc = "index"
exclude_patterns = ["_build"]
pygments_style = "default"
html_theme = "openstackdocs"

5
doc/source/identity.rst Normal file
View File

@@ -0,0 +1,5 @@
Identity
========
.. openapi:: ../../specs/identity/v3.yaml
:service_type: identity

4
doc/source/image.rst Normal file
View File

@@ -0,0 +1,4 @@
Image
=====
.. openapi:: ../../specs/image/v2.yaml

174
doc/source/index.rst Normal file
View File

@@ -0,0 +1,174 @@
==============
OpenStack APIs
==============
This project (currently in a POC phase) serves 2 different purposes:
- stores individual OpenAPI specs for the services (it feels more logical
to have a central storage of specs to make it easier for the consumers to
find them and to enforce certain spec rules, especially with OpenStack
APIs being not very OpenAPI conform)
- implement a Sphinx extension to render the specs into the HTML. Currently
it does so in a style that is a mix of a Swagger style and old os-api-ref
style for OpenStack.
OpenAPI specifics
=================
Not all OpenStack APIs fit properly into the OpenAPI specification. In order to still be able to provide OpenAPI specs for those services, certain decisions (workarounds) have been made.
Microversion
~~~~~~~~~~~~
The concept of microversions in OpenStack allows using a different
operation schema depending on the version header. This is not very well
addressed by OpenAPI, but a workaround exists. Since
OpenAPI is based on JSON Schema 3.1, it is possible to use the "oneOf" construct
to describe different schemas. In order for the OpenStack tooling to be able to describe and recognize this properly, it is required to mark such a schema with a custom "x-" extension
.. code-block:: yaml
components:
schemas:
foo_with_mv:
oneOf:
- $ref: #/components/schemas/foo_v1
- $ref: #/components/schemas/foo_v21
- $ref: #/components/schemas/foo_v220
x-openstack:
discriminator: microversion
foo_v21:
type: object
properties:
- foo:
type: string
x-openstack:
min-ver: 2.1
max-ver: 2.19
foo_v220:
type: object
properties:
- foo:
type: string
x-openstack:
min-ver: 2.20
.. note::
`min-ver` and `max-ver` properties are having the same
meaning as in the services: starting with which microversion
the schema has been added and till which microversion it
eventually is valid
Action
~~~~~~
A minority of OpenStack services (but in some of the most widely used places) have a
concept of actions. This was inspired by RPC, where different actions are performed depending on the operation payload.
OpenAPI currently strictly requires that a combination of URL + HTTP
method be unique. Since actions require quite the opposite, a
solution similar to the one for microversions can be applied here as well.
.. code-block:: yaml
components:
schemas:
server_actions:
oneOf:
- $ref: #/components/schemas/action_foo
- $ref: #/components/schemas/action_bar
x-openstack:
discriminator: action
action_foo:
type: object
properties:
- foo:
type: string
x-openstack:
action-name: foo
min-ver: 2.1
max-ver: 2.19
action_bar:
type: object
properties:
- bar:
type: integer
x-openstack:
action-name: bar
min-ver: 2.20
.. note:: it is possible even to combine those methods when a certain action
is also supporting different microversions. For this on a first level
there is still an "action" discriminator is being used and the action body
schema itself is also an "oneOf" schema setting discriminator to
"microversion".
Flexible HTTP headers
~~~~~~~~~~~~~~~~~~~~~
Mostly Swift allows custom headers both in request and response. In the
current form OpenAPI requires that all headers are explicitly described. In
order to deal with this situation, a "regexp" form of the headers can be used.
.. code-block:: yaml
...
responses:
'200':
description: OK
headers:
X-Account-Meta-*:
$ref: '#/components/headers/X-Account-Meta'
components:
headers:
X-Account-Meta:
x-openstack:
style: regex
...
Path requirements
=================
For a long time (and still in some places) Services declare their APIs
requiring some form of `project_id` as part of the operation URL. Others place
version prefix while yet others do not. In order to bring consistency and fit
specs into the OpenAPI concept it is required that version prefix IS part of
the url. This brings assumption to the tooling relying on the specs that the
URL is appended behind the "version discovery" endpoint of the service. The
tooling is, however, advised to apply additional logic of avoiding certain
path elements duplication when service catalog points to the versioned
service endpoint. This requirement helps solving routing issues in the client
facing tool with determination of a service "root".
The spec is also defining the API version (which may look like "2.92" to
communicate maximal microversion)
Spec generation
===============
All specs provided here are generated automatically from the source code of the services using `openstack-codegenerator <https://gtema.github.io/openstack-codegenerator>`_ project. It is a conscious decision not to deal with specs manually due to their size, complexity and the issues described above.
Development state
=================
At the moment all specs (except object-store) are created automatically. HTML
rendering is at a very early stage not properly implementing actions and
microversions rendering and instead renders every URL like the Swagger would
do. This is going to change once more time is going to be invested on this
front.
.. toctree::
:maxdepth: 1
block-storage
compute
identity
image
load-balancer
network
object-store
placement

View File

@@ -0,0 +1,4 @@
Load Balancing
==============
.. openapi:: ../../specs/load-balancer/v2.yaml

4
doc/source/network.rst Normal file
View File

@@ -0,0 +1,4 @@
Network
=======
.. openapi:: ../../specs/network/v2.yaml

View File

@@ -0,0 +1,4 @@
Object Store
============
.. openapi:: ../../specs/object-store/v1.yaml

4
doc/source/placement.rst Normal file
View File

@@ -0,0 +1,4 @@
Placement
=========
.. openapi:: ../../specs/placement/v1.yaml

900
os_openapi/__init__.py Normal file
View File

@@ -0,0 +1,900 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
os_openapi
---------------------
The OpenAPI spec renderer for Sphinx. It's a new way to document your
RESTful API. Based on ``sphinxcontrib-openapi``.
"""
import functools
import os
import json
import typing as ty
from typing import Any
from urllib.parse import urlsplit
from urllib.request import urlopen
from ruamel.yaml import YAML
import jsonschema
import collections
import collections.abc
from contextlib import closing
from sphinx.util import logging
import pbr.version
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx.util.docutils import SphinxDirective
from sphinx.util import osutil
from myst_parser.mdit_to_docutils.base import make_document
from myst_parser.parsers.docutils_ import Parser
__version__ = pbr.version.VersionInfo("os_openapi").version_string()
LOG = logging.getLogger(__name__)
# Locally cache spec to speedup processing of same spec file in multiple
# openapi directives
@functools.lru_cache
def _get_spec(abspath, encoding):
    """Load and parse an OpenAPI spec file, cached per (path, encoding).

    Caching avoids re-parsing when multiple ``openapi`` directives in
    the documentation reference the same spec file.

    :param abspath: absolute path of the YAML spec file
    :param encoding: text encoding used to open the file
    :returns: the parsed spec as plain Python data structures
    """
    # ruamel is used on purpose: it implements YAML 1.2 and therefore
    # properly understands quotes for nova boolean enum values.
    loader = YAML(typ="safe")
    with open(abspath, encoding=encoding) as spec_file:
        return loader.load(spec_file)
class openapi(nodes.Part, nodes.Element):
    """Docutils placeholder node marking a rendered OpenAPI spec."""

    pass
class openapi_operation_header(nodes.Part, nodes.Element):
    """Docutils node for an operation header (summary, method, path)."""

    pass
class openapi_operation_tab(nodes.Part, nodes.Element):
    """Docutils node for a tab within an operation's rendered output."""

    pass
class openapi_operation_body_description(nodes.Part, nodes.Element):
    """Docutils node representing an operation request body as a tab."""

    pass
class OpenApiRefResolver(jsonschema.RefResolver):
    """
    Overrides resolve_remote to support both YAML and JSON
    OpenAPI schemas.
    """

    # ``requests`` is optional; fall back to urllib when unavailable.
    try:
        import requests

        _requests = requests
    except ImportError:
        _requests = None

    def resolve_remote(self, uri):
        """Fetch and parse a remote reference.

        YAML documents (``.yml``/``.yaml``) are parsed with ruamel;
        everything else is delegated to the base class, which handles
        JSON and any custom-registered scheme handlers.
        """
        scheme, _, path, _, _ = urlsplit(uri)
        _, extension = os.path.splitext(path)
        if extension not in [".yml", ".yaml"] or scheme in self.handlers:
            return super().resolve_remote(uri)

        # BUG FIX: ruamel's YAML instances expose load(), not
        # safe_load(); the original called yaml.safe_load() which
        # raises AttributeError. Use the "safe" loader explicitly.
        yaml = YAML(typ="safe")
        if scheme in ["http", "https"] and self._requests:
            response = self._requests.get(uri)
            result = yaml.load(response.content)
        else:
            # Otherwise, pass off to urllib and assume utf-8
            with closing(urlopen(uri)) as url:
                response = url.read().decode("utf-8")
            result = yaml.load(response)

        if self.cache_remote:
            self.store[uri] = result
        return result
def _resolve_refs(uri, spec):
    """Resolve JSON references in a given dictionary.

    OpenAPI spec may contain JSON references to its nodes or external
    sources, so any attempt to rely that there's some expected attribute
    in the spec may fail. So we need to resolve JSON references before
    we use it (i.e. replace with referenced object). For details see:

        https://tools.ietf.org/html/draft-pbryan-zyp-json-ref-02

    The input spec is modified in-place despite being returned from
    the function.
    """
    resolver = OpenApiRefResolver(uri, spec)

    # BUG FIX: ``seen`` was a mutable default argument (list); use an
    # immutable tuple to rule out accidental sharing between calls.
    def _do_resolve(node, seen=()):
        # ``seen`` holds the chain of $ref values already expanded on
        # the current path so recursive data types terminate.
        if isinstance(node, collections.abc.Mapping) and "$ref" in node:
            ref = node["$ref"]
            with resolver.resolving(ref) as resolved:
                if ref in seen:
                    # BUG FIX: the key must be the string "type"; the
                    # original used the ``type`` builtin as the key.
                    # Return a distinct object for recursive data types.
                    return {"type": "object"}
                # The resolved node might contain further references.
                return _do_resolve(resolved, seen + (ref,))
        elif isinstance(node, collections.abc.Mapping):
            for k, v in node.items():
                node[k] = _do_resolve(v, seen)
        elif isinstance(node, (list, tuple)):
            for i in range(len(node)):
                node[i] = _do_resolve(node[i], seen)
        return node

    return _do_resolve(spec)
def normalize_spec(spec, **options):
    """Normalize an OpenAPI spec (in place) for easier processing.

    Resolves JSON references and pushes path-level ``parameters`` down
    into every operation so later code does not need to special-case
    them.

    :param spec: parsed OpenAPI spec; mutated in place
    :param options: ``uri`` is used as the base for resolving relative
        references, so it must be set correctly by the caller
    :returns: the same (mutated) spec, so calls can be chained.
        Previously the normalized spec was computed but never returned.
    """
    # OpenAPI spec may contain JSON references, so we need resolve them
    # before we access the actual values trying to build an httpdomain
    # markup. Since JSON references may be relative, it's crucial to
    # pass a document URI in order to properly resolve them.
    spec = _resolve_refs(options.get("uri", ""), spec)
    # OpenAPI spec may contain common endpoint's parameters top-level.
    # In order to do not place if-s around the code to handle special
    # cases, let's normalize the spec and push common parameters inside
    # endpoints definitions.
    for endpoint in spec.get("paths", {}).values():
        parameters = endpoint.pop("parameters", [])
        for method in endpoint.values():
            method.setdefault("parameters", [])
            method["parameters"].extend(parameters)
    return spec
class OpenApiDirective(SphinxDirective):
    """Sphinx directive rendering an OpenAPI spec file into doctree nodes.

    Usage: ``.. openapi:: path/to/spec.yaml`` with optional
    ``:service_type:`` and ``:source_encoding:`` options.
    """

    # The single required argument is the path to the spec file.
    required_arguments = 1
    option_spec = {
        "source_encoding": directives.encoding,
        "service_type": directives.unchanged,
    }
    # Markdown -> docutils parser; instantiated in run().
    parser: Parser
def run(self):
    """Directive entry point: load, normalize and render the spec.

    :returns: a list of docutils nodes (spec header nodes followed by
        one section per OpenAPI tag group)
    """
    relpath, abspath = self.env.relfn2path(
        directives.path(self.arguments[0])
    )
    # URI parameter is crucial for resolving relative references. So we
    # need to set this option properly as it's used later down the
    # stack.
    # BUG FIX: the original built "file://%<path>" with a stray "%",
    # producing an invalid base URI.
    self.options.setdefault("uri", f"file://{abspath}")
    # Add a given OpenAPI spec as a dependency of the referring
    # reStructuredText document, so the document is rebuilt each time
    # the spec is changed.
    self.env.note_dependency(relpath)
    # Read the spec using encoding passed to the directive or fallback to
    # the one specified in Sphinx's config.
    # BUG FIX: the option is declared as "source_encoding" in
    # option_spec, but the original looked up "encoding" and therefore
    # silently ignored a user-supplied value.
    encoding = self.options.get(
        "source_encoding", self.config.source_encoding
    )
    spec: dict[str, Any] = _get_spec(abspath, encoding)
    # spec filename as copied to _static (only when service_type given)
    fname: str | None = None
    normalize_spec(spec)
    if "service_type" in self.options:
        st = self.options.get("service_type")
        # copy spec under the _static so it can be downloaded
        fname = f"{st}_v{spec['info']['version']}.yaml"
        dest = os.path.join(
            self.env.app.builder.outdir, "_static", "openapi", fname
        )
        destdir = os.path.dirname(dest)
        osutil.ensuredir(destdir)
        LOG.info("Copying spec to: %s", dest)
        osutil.copyfile(abspath, dest)
    # Markdown -> docutils parser
    self.parser = Parser()
    results = []
    for hdr in self._get_spec_header_nodes(spec, fname):
        results.append(hdr)
    # BUG FIX: the fallback for specs without tags must be a mapping,
    # since _get_api_group_nodes reads tag["name"]; the original
    # default ["default"] (a bare string) would raise TypeError.
    for tag in spec.get("tags", [{"name": "default"}]):
        results.append(self._get_api_group_nodes(spec, tag))
    return results
def _append_markdown_content(self, node, content: str):
    """Render Markdown ``content`` and attach the result to ``node``.

    The text is parsed into a throwaway docutils document whose
    children are then moved under ``node``.
    """
    doc = make_document(parser_cls=self.parser)
    self.parser.parse(content, doc)
    for parsed in doc.children:
        node += parsed
def _get_spec_header_nodes(
    self, spec: dict[str, Any], fname: str | None = None
):
    """Yield the document header nodes for a spec.

    Emits the API version followed by the spec description (falling
    back to the summary) rendered from Markdown. ``fname`` is accepted
    for interface compatibility; it is not currently used (a download
    link used to be rendered from it).
    """
    info = spec["info"]
    yield nodes.version("", info["version"])
    description = info.get("description", info.get("summary"))
    if description:
        rubric = nodes.rubric("")
        self._append_markdown_content(rubric, description)
        yield rubric
def _get_api_group_nodes(self, spec, tag):
    """Build a section node for one OpenAPI tag (API group).

    Collects every operation in the spec whose ``tags`` list contains
    this tag's name and renders it inside the group's section.

    :param spec: the normalized OpenAPI spec
    :param tag: a tag mapping ({"name": ..., "description": ...}); a
        bare string is tolerated as well (e.g. a "default" fallback)
    :returns: a docutils section node
    """
    # Tags are normally mappings, but be defensive about bare strings.
    if isinstance(tag, collections.abc.Mapping):
        tag_name = tag["name"]
        group_descr = tag.get("description", "")
    else:
        tag_name = tag
        group_descr = ""
    targetid = f"group-{tag_name}"
    section = nodes.section(
        classes=["api-group", "accordion"], ids=[targetid]
    )
    section += nodes.title(text=tag_name)
    if group_descr:
        self._append_markdown_content(section, group_descr)
    for url, path_def in spec["paths"].items():
        for method in ["head", "get", "post", "put", "patch", "delete"]:
            # BUG FIX: operations without a "tags" key made
            # `tag_name in None` raise TypeError; default to [].
            if method in path_def and tag_name in path_def[method].get(
                "tags", []
            ):
                operation_def = path_def[method]
                for child in self._get_operation_nodes(
                    spec, url, method, operation_def
                ):
                    section += child
    return section
def _get_operation_nodes(self, spec, path, method, operation_spec):
    """Process OpenAPI operation.

    Yields one section node per rendered operation. A single URL can
    produce multiple entries when it is an OpenStack "action" endpoint
    (path ending in ``/action``) whose request body is a ``oneOf``
    schema marked with ``x-openstack.discriminator == "action"``.
    """
    # We might want to have multiple separate entries for single url
    # (a.k.a. actions). Each tuple is:
    #   (operation spec, action name or None, request body schema or None)
    operation_specs = []
    if not path.endswith("/action"):
        # Regular operation: use the JSON request body schema as-is.
        body = (
            operation_spec.get("requestBody", {})
            .get("content", {})
            .get("application/json", {})
            .get("schema")
        )
        operation_specs.append((operation_spec, None, body))
    else:
        # Body
        body = (
            operation_spec.get("requestBody", {})
            .get("content", {})
            .get("application/json", {})
            .get("schema")
        )
        if body:
            actions = body.get("oneOf", [])
            discriminator = body.get("x-openstack", {}).get(
                "discriminator"
            )
            if actions and discriminator == "action":
                # One entry per action alternative of the oneOf schema.
                for candidate in actions:
                    action_name = candidate.get("x-openstack", {}).get(
                        "action-name"
                    )
                    if not action_name:
                        # No action name on the body. Take 1st property
                        # name
                        action_name = list(candidate["properties"].keys())[
                            0
                        ]
                    operation_specs.append(
                        (operation_spec, action_name, candidate)
                    )
            else:
                # This does not look like an action, just return operation
                operation_specs.append((operation_spec, None, None))
        else:
            # This does not look like an action (no body), just return
            # operation
            operation_specs.append((operation_spec, None, None))
    for operation_spec, action_name, request_body in operation_specs:
        # Iterate over spec and eventual actions.
        # Sanitize operationId so it is usable as an HTML id.
        op_id = (
            operation_spec["operationId"]
            .replace(":", "_")
            .replace("/", "_")
        )
        if action_name:
            op_id += f"-{action_name}"
        op_suffix = ""
        if operation_spec.get("deprecated", False):
            # Used as a CSS class suffix to style deprecated operations.
            op_suffix = "-deprecated"
        container = nodes.section(
            ids=[f"operation-{op_id}"],
            classes=[
                "accordion-item",
                "operation" + op_suffix,
                "operation-" + method,
                "gy-2",
            ],
        )
        op_header = openapi_operation_header()
        if not action_name:
            op_header["summary"] = operation_spec.get("summary")
        else:
            if request_body and "summary" in request_body:
                # For actions we store summary under the body
                op_header["summary"] = request_body["summary"]
            elif path.endswith("/action") and action_name:
                # NOTE(review): inside this else-branch both conditions
                # are always true, so the final else below appears
                # unreachable — confirm before relying on it.
                op_header["summary"] = f"`{action_name}` action"
            else:
                op_header["summary"] = request_body.get(
                    "description", f"{action_name} action"
                )
        op_header["operationId"] = op_id
        op_header["method"] = method
        op_header["path"] = path
        if op_header["summary"]:
            container += nodes.title(text=op_header["summary"])
        container += op_header
        # Collapsible body holding description, request and response.
        content = nodes.compound(
            classes=["accordion-collapse collapse accordion-body"],
            ids=[f"collapse{op_id}"],
        )
        descr = operation_spec.get("description")
        if descr:
            self._append_markdown_content(content, descr)
        else:
            # For actions we place their description as body description
            if request_body and "description" in request_body:
                self._append_markdown_content(
                    content, request_body["description"]
                )
        content += self._get_operation_request_node(
            op_id, operation_spec, action_name, request_body
        )
        content += self._get_operation_response_node(
            op_id, operation_spec, action_name
        )
        container += content
        yield container
def _get_operation_request_node(
    self, operationId, operation_spec, action_name=None, request_body=None
):
    """Build the "Request" section for a single operation.

    :param operationId: sanitized operation id used to build node ids
    :param operation_spec: the OpenAPI operation spec (supplies the
        ``parameters`` list rendered into the table)
    :param action_name: optional action name for "action" style operations
    :param request_body: body schema of the operation; when falsy an empty
        section is returned
    :returns: a docutils ``section`` node with title, parameter/body table
        tab, JSON schema tab and body examples
    """
    request = nodes.section(ids=[f"api-req-{operationId}"])
    # NOTE(review): operations without a request body (which includes all
    # non-action operations, whose caller passes request_body=None) get an
    # empty section here, so their query/path parameters are never
    # rendered — confirm this is intentional.
    if not request_body:
        return request
    request += nodes.title(text="Request")
    operation_id = "operation-%d" % self.env.new_serialno(
        "operation-description"
    )
    operation_node = openapi_operation_body_description(
        "", ids=[operation_id]
    )
    # 4-column table: Name | Location | Type | Description
    table = nodes.table()
    tgroup = nodes.tgroup(cols=4)
    for _ in range(4):
        tgroup += nodes.colspec(colwidth=1)
    table += tgroup
    thead = nodes.thead()
    tgroup += thead
    tr = nodes.row()
    thead += tr
    for col in ["Name", "Location", "Type", "Description"]:
        tr += nodes.entry("", nodes.paragraph(text=col))
    # Table data: query/path/header parameters first, then body fields.
    rows = []
    for param in operation_spec.get("parameters", []):
        rows.append(self._get_request_table_param_row(param))
    # The early return above guarantees request_body is set, so the body
    # schema always comes from it.  (A previously dead branch fell back to
    # operation_spec["requestBody"]["content"]["application/json"]["schema"]
    # but could never be reached; it has been removed.)
    body = request_body
    rows.extend(self._get_request_table_field_row(body, None, set()))
    tbody = nodes.tbody()
    tbody.extend(rows)
    tgroup += tbody
    for key, sample in body.get("examples", {}).items():
        for el in self._get_body_examples(key, sample):
            request += el
    if rows:
        # "Description" tab holding the parameters/body table; marked
        # active so it is the tab shown initially.
        table_tab = openapi_operation_tab(
            "",
            ids=[
                "op-d-%d"
                % self.env.new_serialno("operation-request-descr")
            ],
            classes=["show active"],
        )
        table_tab += table
        operation_node["table_id"] = table_tab["ids"][0]
        operation_node += table_tab
    # "Schema" tab with the raw JSON schema of the request body.
    jsonschema_pre = nodes.literal_block(
        "", classes=["json", "highlight-javascript"]
    )
    jsonschema_pre.append(
        nodes.literal(
            text=json.dumps(request_body, indent=2),
            language="json",
            classes=["highlight", "code"],
        )
    )
    schema_tab = openapi_operation_tab(
        "",
        ids=[
            "op-s-%d" % self.env.new_serialno("operation-request-schema")
        ],
    )
    operation_node["schema_id"] = schema_tab["ids"][0]
    schema_tab += jsonschema_pre
    operation_node += schema_tab
    request += operation_node
    return request
def _get_operation_response_node(
    self, operationId, operation_spec, action_name=None
):
    """Build the "Responses" section for a single operation.

    For regular operations the body schema is taken straight from each
    response's ``application/json`` content.  For action operations the
    schema is searched among the ``oneOf`` candidates by matching the
    ``x-openstack.action-name`` extension against ``action_name``.

    :param operationId: sanitized operation id used to build node ids
    :param operation_spec: the OpenAPI operation spec (``responses`` map)
    :param action_name: optional action name to select the matching
        response schema
    :returns: a docutils ``section`` node with one sub-section per
        response code
    """
    responses = nodes.section(ids=[f"api-res-{operationId}"])
    responses += nodes.title(text="Responses")
    response_specs = operation_spec.get("responses")
    # One sub-section per status code, in sorted (string) order.
    for code, response_spec in sorted(response_specs.items()):
        rsp_id = "response-%d" % self.env.new_serialno("response")
        response = nodes.section(ids=[rsp_id])
        response += nodes.title(text=code)
        descr = response_spec.get("description")
        if descr:
            self._append_markdown_content(response, descr)
        responses += response
        response_schema = None
        # NOTE(review): this node reuses rsp_id, which was already given
        # to the section above — the rendered HTML ends up with duplicate
        # element ids; confirm whether a distinct id was intended.
        operation_node = openapi_operation_body_description(
            "", ids=[rsp_id]
        )
        # 4-column table: Name | Location | Type | Description
        table = nodes.table()
        tgroup = nodes.tgroup(cols=4)
        for _ in range(4):
            colspec = nodes.colspec(colwidth=1)
            tgroup += colspec
        table += tgroup
        # Build table headers
        thead = nodes.thead()
        tgroup += thead
        tr = nodes.row()
        thead += tr
        for col in ["Name", "Location", "Type", "Description"]:
            tr += nodes.entry("", nodes.paragraph(text=col))
        # Table data
        rows = []
        # TODO(gtema) Operation may return headers
        # for param in operation_spec.get("parameters", []):
        #     rows.append(self._get_request_table_param_row(param))
        # Body
        if not action_name:
            response_schema = (
                response_spec.get("content", {})
                .get("application/json", {})
                .get("schema")
            )
        else:
            # Iterate over all available responses to find suitable action
            # response
            candidates = []
            body_candidate = (
                response_spec.get("content", {})
                .get("application/json", {})
                .get("schema")
            )
            if body_candidate:
                candidates = body_candidate.get("oneOf", [])
                if not candidates:
                    # No oneOf: the schema itself is the only candidate.
                    candidates.append(body_candidate)
            # If several candidates carry the same action-name the last
            # match wins.
            for candidate in candidates:
                os_ext = candidate.get("x-openstack", {})
                rsp_act_name = os_ext.get("action-name")
                if rsp_act_name == action_name:
                    response_schema = candidate
            # TODO(gtema) how to properly identify response code of the
            # action when it returns no body at all. This info is present
            # on the server side, but is missing in openapi
        if response_schema:
            for el in self._get_request_table_field_row(
                response_schema, None, set()
            ):
                rows.append(el)
        # NOTE(review): the schema tab is built even when response_schema
        # is None, so json.dumps renders the literal text "null" — confirm
        # whether the tab should be skipped in that case.
        jsonschema_pre = nodes.literal_block(
            "", classes=["json", "highlight-javascript"]
        )
        jsonschema_pre.append(
            nodes.literal(
                text=json.dumps(response_schema, indent=2),
                language="json",
                classes=["highlight", "code"],
            )
        )
        schema_tab = openapi_operation_tab(
            "",
            ids=[
                "op-rsp-s-%d"
                % self.env.new_serialno("operation-response-schema")
            ],
        )
        schema_tab += jsonschema_pre
        operation_node["schema_id"] = schema_tab["ids"][0]
        operation_node += schema_tab
        tbody = nodes.tbody()
        tbody.extend(rows)
        tgroup += tbody
        if rows:
            # "Description" tab with the field table; shown initially.
            table_tab = openapi_operation_tab(
                "",
                ids=[
                    "op-rsp-d-%d"
                    % self.env.new_serialno("operation-response-descr")
                ],
                classes=["show active"],
            )
            table_tab += table
            operation_node["table_id"] = table_tab["ids"][0]
            operation_node += table_tab
        # response += table
        if response_schema:
            for key, sample in response_schema.get("examples", {}).items():
                for el in self._get_body_examples(key, sample):
                    response += el
        response += operation_node
    return responses
def _get_request_table_param_row(self, param):
    """Render one request parameter/header as a 4-column table row
    (Name | Location | Type | Description)."""
    name_cell = nodes.entry("", nodes.paragraph(text=param["name"]))
    location_cell = nodes.entry("", nodes.paragraph(text=param["in"]))
    type_cell = nodes.entry(
        "", nodes.paragraph(text=param["schema"]["type"])
    )
    # Description may contain markdown and is rendered separately.
    descr_cell = nodes.entry("")
    self._append_markdown_content(descr_cell, param.get("description", ""))
    row = nodes.row()
    row.extend([name_cell, location_cell, type_cell, descr_cell])
    return row
def _get_request_table_field_row(self, field, field_name, emitted_fields):
    """Yield request/response description table rows for a body element.

    Recurses into objects, arrays and ``oneOf`` variants.

    :param field: JSON schema fragment describing the element
    :param field_name: dotted name of the element within the body
        (``None`` for the root)
    :param emitted_fields: mutable set of names already rendered, used to
        suppress duplicate rows (e.g. across microversion variants —
        presumably; confirm against callers)
    :returns: generator of docutils ``row`` nodes
    """
    if not field:
        return
    typ = field.get("type")
    # Version notes come from the x-openstack vendor extension.  Both the
    # min-ver and max-ver notes are rendered when present (previously the
    # max-ver note silently overwrote the min-ver one).
    note = ""
    os_ext = field.get("x-openstack", {})
    if os_ext:
        min_ver = os_ext.get("min-ver")
        max_ver = os_ext.get("max-ver")
        if min_ver:
            note += f"<br/><strong>New in version {min_ver}</strong>"
        if max_ver:
            note += (
                f"<br/><strong>Available until version {max_ver}</strong>"
            )
    param_descr = f'{field.get("description", "")}{note}'

    def make_row(type_text):
        # Build one table row: Name | "body" | Type | Description.
        tr = nodes.row()
        tr += nodes.entry("", nodes.paragraph(text=field_name))
        tr += nodes.entry("", nodes.paragraph(text="body"))
        tr += nodes.entry("", nodes.paragraph(text=type_text))
        td = nodes.entry("")
        self._append_markdown_content(td, param_descr)
        tr += td
        return tr

    if typ == "object" and "properties" in field:
        if (
            field_name
            and field_name not in emitted_fields
            and f"{field_name}[]" not in emitted_fields
        ):
            emitted_fields.add(field_name)
            yield make_row(field.get("type", ""))
        # Recurse into each property with a dotted name.
        for k, v in field["properties"].items():
            yield from self._get_request_table_field_row(
                v, f"{field_name}.{k}" if field_name else k, emitted_fields
            )
    elif typ == "array":
        emitted_fields.add(f"{field_name}[]")
        yield make_row("array")
        # Recurse into the item schema under "name[]".
        yield from self._get_request_table_field_row(
            field.get("items"), f"{field_name}[]", emitted_fields
        )
    elif typ:
        # Scalar field.
        if field_name and field_name not in emitted_fields:
            emitted_fields.add(field_name)
            yield make_row(field.get("type", ""))
    if not typ and "oneOf" in field:
        # Microversion and action variants are flattened identically
        # (the two branches were previously duplicated verbatim).
        discriminator = field.get("x-openstack", {}).get("discriminator")
        if discriminator in ("microversion", "action"):
            for opt in field["oneOf"]:
                yield from self._get_request_table_field_row(
                    opt, field_name, emitted_fields
                )
def _get_body_examples(self, sample_key, sample):
    """Yield a caption paragraph and a literal block for one body example."""
    heading = f"Example ({sample_key})" if sample_key else "Example"
    caption = nodes.paragraph("")
    caption.append(nodes.strong(text=heading))
    yield caption
    code = nodes.literal(
        text=sample,
        language="javascript",
        classes=["highlight", "code"],
    )
    block = nodes.literal_block(
        "", classes=["javascript", "highlight-javascript"]
    )
    block.append(code)
    # TODO(gtema): how to trigger activation of pygments?
    yield block
def visit_openapi_operation_header(self, node):
    """Render a bootstrap accordion button for the operation header.

    Path parameters (segments starting with ``{``) are wrapped in a
    ``path_parameter`` span.  Raises ``nodes.SkipNode`` because the whole
    header is emitted here.
    """
    tag_id = node["operationId"]
    method = node["method"]
    summary = node.get("summary", "")
    segments = [seg for seg in node["path"].split("/") if seg]
    rendered = "/".join(
        f'<span class="path_parameter">{seg}</span>'
        if seg[0] == "{"
        else seg
        for seg in segments
    )
    # Normalize to a leading slash; an empty path collapses to "/".
    if not rendered or rendered[0] == "/":
        path = "/"
    else:
        path = "/" + rendered
    out = self.body
    out.append(
        '<button class="accordion-button collapsed" type="button" '
        f'data-bs-toggle="collapse" data-bs-target="#collapse{tag_id}" '
        f'aria-expanded="false" aria-controls="collapse{tag_id}">'
    )
    out.append('<div class="container">')
    out.append('<div class="row">')
    out.append(
        f'<div class="col-1"><span class="badge label-{method}">'
        f"{method.upper()}</span></div>"
    )
    out.append(
        f'<div class="col-11"><div class="operation-path">{path}</div>'
        f'<div class="operation-summary">{summary or ""}</div></div>'
    )
    out.append("</div>")
    out.append("</div>")
    out.append("</button>")
    raise nodes.SkipNode
def visit_openapi_operation_body(self, node):
    """Open the Description/Schema bootstrap tab strip for a body node.

    Emits the ``nav-tabs`` header with up to two tab buttons (present only
    when the node carries the corresponding ``table_id``/``schema_id``),
    then opens the ``tab-content`` container closed by the depart handler.
    """
    self.body.append('<ul class="nav nav-tabs" role="tablist">')
    tab_table_id = node.get("table_id")
    tab_schema_id = node.get("schema_id")
    if tab_table_id:
        # Description tab: the initially active one, hence
        # aria-selected="true".
        self.body.append('<li class="nav-item" role="presentation">')
        self.body.append(
            f'<button class="nav-link active" id="{tab_table_id}" data-bs-toggle="tab" data-bs-target="#{tab_table_id}-pane" type="button" role="tab" aria-controls="{tab_table_id}-pane" aria-selected="true">Description</button>'
        )
        self.body.append("</li>")
    if tab_schema_id:
        # Schema tab is not initially active, so it must not also claim
        # aria-selected="true" (only one tab in a tablist may be selected;
        # it previously did).
        self.body.append('<li class="nav-item" role="presentation">')
        self.body.append(
            f'<button class="nav-link" id="{tab_schema_id}" data-bs-toggle="tab" data-bs-target="#{tab_schema_id}-pane" type="button" role="tab" aria-controls="{tab_schema_id}-pane" aria-selected="false">Schema</button>'
        )
        self.body.append("</li>")
    self.body.append("</ul>")
    self.body.append('<div class="tab-content">')
def depart_openapi_operation_body(self, node):
    # Close the <div class="tab-content"> opened by the visit handler.
    self.body.append("</div>")
def visit_openapi_operation_tab(self, node):
    """Open a bootstrap tab pane for an operation tab node.

    The pane id is the node id plus ``-pane``; the node's classes
    (e.g. "show active") are appended to the pane class list.
    """
    classes = " ".join(node.get("classes", []))
    # "aria-labelledby" was previously misspelled "aria-labeledby",
    # which is not a valid ARIA attribute and breaks the tab/label link.
    self.body.append(
        f'<div id="{node["ids"][0]}-pane" class="tab-pane fade {classes}" role="tabpanel" aria-labelledby="{node["ids"][0]}" tabindex="0">'
    )
def depart_openapi_operation_tab(self, node):
    # Close the tab pane <div> opened by the visit handler.
    self.body.append("</div>")
def copy_assets(app, exception) -> None:
    """Copy registered static assets into the build's ``_static`` dir.

    Connected to the ``build-finished`` event; does nothing when the
    build raised an exception or the builder is not one of the supported
    HTML builders.

    :param app: the Sphinx application
    :param exception: exception raised during the build, or None
    """
    assets: list[str] = ["api-ref.css"]
    builders: list[str] = [
        "html",
        "readthedocs",
        "readthedocssinglehtmllocalmedia",
    ]
    if app.builder.name not in builders or exception:
        return
    LOG.info("Copying assets: %s", ", ".join(assets))
    # The destination and source directories are the same for every asset,
    # so compute (and create) them once instead of per iteration.
    base_dest = os.path.join(app.builder.outdir, "_static")
    osutil.ensuredir(base_dest)
    source = os.path.abspath(os.path.dirname(__file__))
    for asset in assets:
        osutil.copyfile(
            os.path.join(source, "assets", asset),
            os.path.join(base_dest, asset),
        )
def add_assets(app) -> None:
    """Register assets"""
    # Registered on "builder-inited".  The CSS file itself is copied into
    # the output's _static directory by copy_assets() after the build.
    app.add_css_file("api-ref.css")
def setup(app) -> dict[str, bool]:
    """Sphinx extension entry point.

    Registers the custom docutils nodes with their HTML visitors, the
    ``openapi`` directive, and the event handlers that register and copy
    the static assets.

    NOTE(review): the annotation ``dict[str, bool]`` is inaccurate — the
    returned "version" value is not a bool — and built-in generics in a
    runtime-evaluated return annotation require Python 3.9+ unless the
    module has ``from __future__ import annotations``; confirm against the
    declared python_requires (>=3.8).
    """
    # operation header is rendered entirely in its visit function
    # (it raises SkipNode), hence no depart handler.
    app.add_node(
        openapi_operation_header, html=(visit_openapi_operation_header, None)
    )
    app.add_node(
        openapi_operation_body_description,
        html=(visit_openapi_operation_body, depart_openapi_operation_body),
    )
    app.add_node(
        openapi_operation_tab,
        html=(visit_openapi_operation_tab, depart_openapi_operation_tab),
    )
    # This specifies all our directives that we're adding
    app.add_directive("openapi", OpenApiDirective)
    app.connect("builder-inited", add_assets)
    # This copies all the assets (css, js, fonts) over to the build
    # _static directory during final build.
    app.connect("build-finished", copy_assets)
    return {
        "parallel_read_safe": True,
        "parallel_write_safe": True,
        "version": __version__,
    }

View File

@@ -0,0 +1,117 @@
:root {
}
.docs-book-wrapper {
max-width: 90% !important
}
.accordion, .operation {
/* Unset default bg color for accordion items */
--bs-accordion-active-bg: unset;
}
section .accordion-button {
/* make background of a button transparent to get the right color */
background-color: transparent;
}
section .accordion-button:focus {
/* remove selected border */
border-color: unset;
}
.api-group table.docutils {
width: 100%;
text-align: left;
}
.operation {
}
section.operation-deprecated {
text-decoration: line-through;
background: #ebebeb1a;
border-color: #ebebeb;
opacity: .6;
}
section.operation-get {
border:1px solid #61affe;
background: #61affe1a;
}
section .label-get {
background-color: #61affe !important;
}
section.operation-head {
background: #9012fe1a;
border-color: #9012fe;
}
section .label-head {
background-color: #9012fe !important;
}
section.operation-put {
background: #fca1301a;
border-color: #fca130;
}
section .label-put {
background-color: #fca130;
}
section.operation-patch {
background: #50e3c21a;
border-color: #50e3c2;
}
section .label-patch {
background-color: #50e3c2;
}
section.operation-post {
background: #49cc901a;
border-color: #49cc90;
}
section .label-post {
background-color: #49cc90;
}
section.operation-delete {
background: #f93e3e1a;
border-color: #f93e3e;
}
section .label-delete {
background-color: #f93e3e;
}
section .operation-path {
align-items: start;
color: #3b4151;
display: flex-root;
font-family: monospace;
font-size: 16px;
font-weight: 600;
word-break: break-word;
}
section .operation-summary {
color: #3b4151;
font-family: sans-serif;
font-size: 13px;
word-break: break-word;
}
.operation h3 {
display: none;
}
.requestbody-item {
overflow: scroll;
}
.requestbody-item > p {
position: relative;
display: block;
padding: 10px 15px;
margin-right: 2px
}

6314
os_openapi/assets/bootstrap.bundle.js vendored Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

12068
os_openapi/assets/bootstrap.css vendored Normal file

File diff suppressed because it is too large Load Diff

6
os_openapi/assets/bootstrap.min.css vendored Normal file

File diff suppressed because one or more lines are too long

53
pyproject.toml Normal file
View File

@@ -0,0 +1,53 @@
[project]
name = "os-openapi"
readme = {file = "README.rst", content-type = "text/x-rst"}
authors = [
{ email="openstack-discuss@lists.openstack.org" }
]
license = {text = "Apache-2.0"}
dynamic = ["version"]
dependencies = [
"ruamel.yaml",
"jsonschema",
"markdown",
"sphinx-mdinclude",
"myst_parser"
]
requires-python = ">=3.8"
classifiers = [
"Environment :: OpenStack",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: Implementation :: CPython",
]
[project.urls]
Homepage = "https://docs.openstack.org/openapi"
Repository = "https://opendev.org/openstack/openapi/"
[build-system]
requires = ["pbr>=6.0.0", "setuptools>=64.0.0"]
build-backend = "pbr.build"
[tool.ruff]
line-length = 79
target-version = "py38"
[tool.ruff.lint]
# enable the following rule classes:
#
# C4: https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4
# UP: https://docs.astral.sh/ruff/rules/#pyupgrade-up
select = ["C4", "UP"]
[tool.ruff.format]
docstring-code-format = true
skip-magic-trailing-comma = true

6
requirements.txt Normal file
View File

@@ -0,0 +1,6 @@
pbr
ruamel.yaml
jsonschema
markdown
sphinx-mdinclude
myst_parser

26
setup.cfg Normal file
View File

@@ -0,0 +1,26 @@
[metadata]
name = os-openapi
summary = Sphinx Extensions to support API reference sites in OpenStack
description_file =
README.rst
author = OpenStack
author_email = openstack-discuss@lists.openstack.org
home_page = https://docs.openstack.org/openapi/latest/
python_requires = >=3.8
classifier =
Environment :: OpenStack
Intended Audience :: Information Technology
Intended Audience :: System Administrators
License :: OSI Approved :: Apache Software License
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: Implementation :: CPython
[files]
packages =
os_openapi

16
setup.py Normal file
View File

@@ -0,0 +1,16 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(setup_requires=["pbr>=2.0.0"], pbr=True)

4534
specs/baremetal/v1.91.yaml Normal file

File diff suppressed because it is too large Load Diff

1
specs/baremetal/v1.yaml Symbolic link
View File

@@ -0,0 +1 @@
v1.91.yaml

20764
specs/block-storage/v3.71.yaml Normal file

File diff suppressed because it is too large Load Diff

1
specs/block-storage/v3.yaml Symbolic link
View File

@@ -0,0 +1 @@
v3.71.yaml

44650
specs/compute/v2.96.yaml Normal file

File diff suppressed because it is too large Load Diff

1
specs/compute/v2.yaml Symbolic link
View File

@@ -0,0 +1 @@
v2.96.yaml

2904
specs/dns/v2.1.yaml Normal file

File diff suppressed because it is too large Load Diff

1
specs/dns/v2.yaml Symbolic link
View File

@@ -0,0 +1 @@
v2.1.yaml

15283
specs/identity/v3.14.yaml Normal file

File diff suppressed because it is too large Load Diff

1
specs/identity/v3.yaml Symbolic link
View File

@@ -0,0 +1 @@
v3.14.yaml

11932
specs/image/v2.16.yaml Normal file

File diff suppressed because it is too large Load Diff

11932
specs/image/v2.yaml Normal file

File diff suppressed because it is too large Load Diff

11068
specs/load-balancer/v2.27.yaml Normal file

File diff suppressed because it is too large Load Diff

1
specs/load-balancer/v2.yaml Symbolic link
View File

@@ -0,0 +1 @@
v2.27.yaml

29941
specs/network/v2.24.yaml Normal file

File diff suppressed because it is too large Load Diff

1
specs/network/v2.yaml Symbolic link
View File

@@ -0,0 +1 @@
v2.24.yaml

1403
specs/object-store/v1.yaml Normal file

File diff suppressed because it is too large Load Diff

1144
specs/placement/v1.39.yaml Normal file

File diff suppressed because it is too large Load Diff

1144
specs/placement/v1.yaml Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1 @@
v2.85.yaml

28
tox.ini Normal file
View File

@@ -0,0 +1,28 @@
[tox]
minversion = 4.3.0
envlist = docs
[testenv]
description =
Run unit tests.
setenv =
LANG=en_US.UTF-8
LANGUAGE=en_US:en
LC_ALL=C
OS_LOG_CAPTURE={env:OS_LOG_CAPTURE:true}
OS_STDOUT_CAPTURE={env:OS_STDOUT_CAPTURE:true}
OS_STDERR_CAPTURE={env:OS_STDERR_CAPTURE:true}
deps =
-c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
-r{toxinidir}/test-requirements.txt
-r{toxinidir}/requirements.txt
[testenv:docs]
usedevelop = True
description =
Build documentation in HTML format.
deps =
-c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
-r{toxinidir}/doc/requirements.txt
commands =
sphinx-build -W --keep-going -b html -j auto doc/source/ doc/build/html

10
zuul.yaml Normal file
View File

@@ -0,0 +1,10 @@
---
- project:
templates:
- publish-openstack-docs-pti
check:
jobs:
- noop
gate:
jobs:
- noop