Retire Packaging Deb project repos
This commit is part of a series to retire the Packaging Deb project. Step 2 is to remove all content from the project repos, replacing it with a README notification where to find ongoing work, and how to recover the repo if needed at some future point (as in https://docs.openstack.org/infra/manual/drivers.html#retiring-a-project). Change-Id: I999443d9fd240490df5a2b232e6c9c6b55a04a34
This commit is contained in:
97
.gitignore
vendored
97
.gitignore
vendored
@@ -1,97 +0,0 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
.installed.cfg
|
||||
*.egg*
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.testrepository/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*,cover
|
||||
.hypothesis/
|
||||
ChangeLog
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# IPython Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# dotenv
|
||||
.env
|
||||
|
||||
# virtualenv
|
||||
venv/
|
||||
ENV/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# IDE files
|
||||
.idea
|
||||
|
||||
# Files created by doc build
|
||||
AUTHORS
|
||||
ChangeLog
|
||||
doc/source/api
|
||||
@@ -1,4 +0,0 @@
|
||||
[gerrit]
|
||||
host=review.openstack.org
|
||||
port=29418
|
||||
project=openstack/glare.git
|
||||
@@ -1,8 +0,0 @@
|
||||
[DEFAULT]
|
||||
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
|
||||
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
|
||||
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-160} \
|
||||
${PYTHON:-python} -m subunit.run discover -t ./ ./glare/tests $LISTOPT $IDOPTION
|
||||
|
||||
test_id_option=--load-list $IDFILE
|
||||
test_list_option=--list
|
||||
201
LICENSE
201
LICENSE
@@ -1,201 +0,0 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
14
README
Normal file
14
README
Normal file
@@ -0,0 +1,14 @@
|
||||
This project is no longer maintained.
|
||||
|
||||
The contents of this repository are still available in the Git
|
||||
source code management system. To see the contents of this
|
||||
repository before it reached its end of life, please check out the
|
||||
previous commit with "git checkout HEAD^1".
|
||||
|
||||
For ongoing work on maintaining OpenStack packages in the Debian
|
||||
distribution, please see the Debian OpenStack packaging team at
|
||||
https://wiki.debian.org/OpenStack/.
|
||||
|
||||
For any further questions, please email
|
||||
openstack-dev@lists.openstack.org or join #openstack-dev on
|
||||
Freenode.
|
||||
18
README.rst
18
README.rst
@@ -1,18 +0,0 @@
|
||||
Glare
|
||||
=====
|
||||
|
||||
Glare (from GLare Artifact REpository) is a service that provides access to a
|
||||
unified catalog of structured meta-information as well as related binary data
|
||||
(these structures are also called 'artifacts').
|
||||
|
||||
* Get Started: https://github.com/openstack/glare/blob/master/doc/source/quickstart.rst
|
||||
* Documentation: https://github.com/openstack/glare/blob/master/doc
|
||||
* Source: https://git.openstack.org/cgit/openstack/glare
|
||||
* Bugs: https://bugs.launchpad.net/glare
|
||||
* Blueprints:** https://blueprints.launchpad.net/glare
|
||||
* REST Client:** https://git.openstack.org/cgit/openstack/python-glareclient
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0
|
||||
245
bandit.yaml
245
bandit.yaml
@@ -1,245 +0,0 @@
|
||||
# optional: after how many files to update progress
|
||||
#show_progress_every: 100
|
||||
|
||||
# optional: plugins directory name
|
||||
#plugins_dir: 'plugins'
|
||||
|
||||
# optional: plugins discovery name pattern
|
||||
plugin_name_pattern: '*.py'
|
||||
|
||||
# optional: terminal escape sequences to display colors
|
||||
#output_colors:
|
||||
# DEFAULT: '\033[0m'
|
||||
# HEADER: '\033[95m'
|
||||
# LOW: '\033[94m'
|
||||
# MEDIUM: '\033[93m'
|
||||
# HIGH: '\033[91m'
|
||||
|
||||
# optional: log format string
|
||||
#log_format: "[%(module)s]\t%(levelname)s\t%(message)s"
|
||||
|
||||
# globs of files which should be analyzed
|
||||
include:
|
||||
- '*.py'
|
||||
- '*.pyw'
|
||||
|
||||
# a list of strings, which if found in the path will cause files to be excluded
|
||||
# for example /tests/ - to remove all all files in tests directory
|
||||
exclude_dirs:
|
||||
- '/tests/'
|
||||
|
||||
profiles:
|
||||
gate:
|
||||
include:
|
||||
|
||||
- any_other_function_with_shell_equals_true
|
||||
- assert_used
|
||||
- blacklist_calls
|
||||
- blacklist_import_func
|
||||
|
||||
# One of the blacklisted imports is the subprocess module. Keystone
|
||||
# has to import the subprocess module in a single module for
|
||||
# eventlet support so in most cases bandit won't be able to detect
|
||||
# that subprocess is even being imported. Also, Bandit's
|
||||
# recommendation is just to check that the use is safe without any
|
||||
# documentation on what safe or unsafe usage is. So this test is
|
||||
# skipped.
|
||||
# - blacklist_imports
|
||||
|
||||
- exec_used
|
||||
|
||||
- execute_with_run_as_root_equals_true
|
||||
|
||||
# - hardcoded_bind_all_interfaces # TODO: enable this test
|
||||
|
||||
# Not working because wordlist/default-passwords file not bundled,
|
||||
# see https://bugs.launchpad.net/bandit/+bug/1451575 :
|
||||
# - hardcoded_password
|
||||
|
||||
# Not used because it's prone to false positives:
|
||||
# - hardcoded_sql_expressions
|
||||
|
||||
# - hardcoded_tmp_directory # TODO: enable this test
|
||||
|
||||
- jinja2_autoescape_false
|
||||
|
||||
- linux_commands_wildcard_injection
|
||||
|
||||
- paramiko_calls
|
||||
|
||||
- password_config_option_not_marked_secret
|
||||
- request_with_no_cert_validation
|
||||
- set_bad_file_permissions
|
||||
- subprocess_popen_with_shell_equals_true
|
||||
# - subprocess_without_shell_equals_true # TODO: enable this test
|
||||
- start_process_with_a_shell
|
||||
# - start_process_with_no_shell # TODO: enable this test
|
||||
- start_process_with_partial_path
|
||||
- ssl_with_bad_defaults
|
||||
- ssl_with_bad_version
|
||||
- ssl_with_no_version
|
||||
# - try_except_pass # TODO: enable this test
|
||||
|
||||
- use_of_mako_templates
|
||||
|
||||
blacklist_calls:
|
||||
bad_name_sets:
|
||||
# - pickle:
|
||||
# qualnames: [pickle.loads, pickle.load, pickle.Unpickler,
|
||||
# cPickle.loads, cPickle.load, cPickle.Unpickler]
|
||||
# message: "Pickle library appears to be in use, possible security issue."
|
||||
# TODO: enable this test
|
||||
- marshal:
|
||||
qualnames: [marshal.load, marshal.loads]
|
||||
message: "Deserialization with the marshal module is possibly dangerous."
|
||||
# - md5:
|
||||
# qualnames: [hashlib.md5, Crypto.Hash.MD2.new, Crypto.Hash.MD4.new, Crypto.Hash.MD5.new, cryptography.hazmat.primitives.hashes.MD5]
|
||||
# message: "Use of insecure MD2, MD4, or MD5 hash function."
|
||||
# TODO: enable this test
|
||||
- mktemp_q:
|
||||
qualnames: [tempfile.mktemp]
|
||||
message: "Use of insecure and deprecated function (mktemp)."
|
||||
- eval:
|
||||
qualnames: [eval]
|
||||
message: "Use of possibly insecure function - consider using safer ast.literal_eval."
|
||||
- mark_safe:
|
||||
names: [mark_safe]
|
||||
message: "Use of mark_safe() may expose cross-site scripting vulnerabilities and should be reviewed."
|
||||
- httpsconnection:
|
||||
qualnames: [httplib.HTTPSConnection]
|
||||
message: "Use of HTTPSConnection does not provide security, see https://wiki.openstack.org/wiki/OSSN/OSSN-0033"
|
||||
- yaml_load:
|
||||
qualnames: [yaml.load]
|
||||
message: "Use of unsafe yaml load. Allows instantiation of arbitrary objects. Consider yaml.safe_load()."
|
||||
- urllib_urlopen:
|
||||
qualnames: [urllib.urlopen, urllib.urlretrieve, urllib.URLopener, urllib.FancyURLopener, urllib2.urlopen, urllib2.Request]
|
||||
message: "Audit url open for permitted schemes. Allowing use of file:/ or custom schemes is often unexpected."
|
||||
- random:
|
||||
qualnames: [random.random, random.randrange, random.randint, random.choice, random.uniform, random.triangular]
|
||||
message: "Standard pseudo-random generators are not suitable for security/cryptographic purposes."
|
||||
level: "LOW"
|
||||
|
||||
# Most of this is based off of Christian Heimes' work on defusedxml:
|
||||
# https://pypi.python.org/pypi/defusedxml/#defusedxml-sax
|
||||
|
||||
# TODO(jaegerandi): Enable once defusedxml is in global requirements.
|
||||
#- xml_bad_cElementTree:
|
||||
# qualnames: [xml.etree.cElementTree.parse,
|
||||
# xml.etree.cElementTree.iterparse,
|
||||
# xml.etree.cElementTree.fromstring,
|
||||
# xml.etree.cElementTree.XMLParser]
|
||||
# message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with it's defusedxml equivilent function."
|
||||
#- xml_bad_ElementTree:
|
||||
# qualnames: [xml.etree.ElementTree.parse,
|
||||
# xml.etree.ElementTree.iterparse,
|
||||
# xml.etree.ElementTree.fromstring,
|
||||
# xml.etree.ElementTree.XMLParser]
|
||||
# message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with it's defusedxml equivilent function."
|
||||
- xml_bad_expatreader:
|
||||
qualnames: [xml.sax.expatreader.create_parser]
|
||||
message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with it's defusedxml equivilent function."
|
||||
- xml_bad_expatbuilder:
|
||||
qualnames: [xml.dom.expatbuilder.parse,
|
||||
xml.dom.expatbuilder.parseString]
|
||||
message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with it's defusedxml equivilent function."
|
||||
- xml_bad_sax:
|
||||
qualnames: [xml.sax.parse,
|
||||
xml.sax.parseString,
|
||||
xml.sax.make_parser]
|
||||
message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with it's defusedxml equivilent function."
|
||||
- xml_bad_minidom:
|
||||
qualnames: [xml.dom.minidom.parse,
|
||||
xml.dom.minidom.parseString]
|
||||
message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with it's defusedxml equivilent function."
|
||||
- xml_bad_pulldom:
|
||||
qualnames: [xml.dom.pulldom.parse,
|
||||
xml.dom.pulldom.parseString]
|
||||
message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with it's defusedxml equivilent function."
|
||||
- xml_bad_etree:
|
||||
qualnames: [lxml.etree.parse,
|
||||
lxml.etree.fromstring,
|
||||
lxml.etree.RestrictedElement,
|
||||
lxml.etree.GlobalParserTLS,
|
||||
lxml.etree.getDefaultParser,
|
||||
lxml.etree.check_docinfo]
|
||||
message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with it's defusedxml equivilent function."
|
||||
|
||||
|
||||
shell_injection:
|
||||
# Start a process using the subprocess module, or one of its wrappers.
|
||||
subprocess: [subprocess.Popen, subprocess.call, subprocess.check_call,
|
||||
subprocess.check_output, utils.execute, utils.execute_with_timeout]
|
||||
# Start a process with a function vulnerable to shell injection.
|
||||
shell: [os.system, os.popen, os.popen2, os.popen3, os.popen4,
|
||||
popen2.popen2, popen2.popen3, popen2.popen4, popen2.Popen3,
|
||||
popen2.Popen4, commands.getoutput, commands.getstatusoutput]
|
||||
# Start a process with a function that is not vulnerable to shell injection.
|
||||
no_shell: [os.execl, os.execle, os.execlp, os.execlpe, os.execv,os.execve,
|
||||
os.execvp, os.execvpe, os.spawnl, os.spawnle, os.spawnlp,
|
||||
os.spawnlpe, os.spawnv, os.spawnve, os.spawnvp, os.spawnvpe,
|
||||
os.startfile]
|
||||
|
||||
blacklist_imports:
|
||||
bad_import_sets:
|
||||
- telnet:
|
||||
imports: [telnetlib]
|
||||
level: HIGH
|
||||
message: "Telnet is considered insecure. Use SSH or some other encrypted protocol."
|
||||
- info_libs:
|
||||
imports: [pickle, cPickle, subprocess, Crypto]
|
||||
level: LOW
|
||||
message: "Consider possible security implications associated with {module} module."
|
||||
|
||||
# Most of this is based off of Christian Heimes' work on defusedxml:
|
||||
# https://pypi.python.org/pypi/defusedxml/#defusedxml-sax
|
||||
|
||||
- xml_libs:
|
||||
imports: [xml.etree.cElementTree,
|
||||
xml.etree.ElementTree,
|
||||
xml.sax.expatreader,
|
||||
xml.sax,
|
||||
xml.dom.expatbuilder,
|
||||
xml.dom.minidom,
|
||||
xml.dom.pulldom,
|
||||
lxml.etree,
|
||||
lxml]
|
||||
message: "Using {module} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {module} with the equivilent defusedxml package."
|
||||
level: LOW
|
||||
- xml_libs_high:
|
||||
imports: [xmlrpclib]
|
||||
message: "Using {module} to parse untrusted XML data is known to be vulnerable to XML attacks. Use defused.xmlrpc.monkey_patch() function to monkey-patch xmlrpclib and mitigate XML vulnerabilities."
|
||||
level: HIGH
|
||||
|
||||
hardcoded_tmp_directory:
|
||||
tmp_dirs: ['/tmp', '/var/tmp', '/dev/shm']
|
||||
|
||||
hardcoded_password:
|
||||
# Support for full path, relative path and special "%(site_data_dir)s"
|
||||
# substitution (/usr/{local}/share)
|
||||
word_list: "%(site_data_dir)s/wordlist/default-passwords"
|
||||
|
||||
ssl_with_bad_version:
|
||||
bad_protocol_versions:
|
||||
- 'PROTOCOL_SSLv2'
|
||||
- 'SSLv2_METHOD'
|
||||
- 'SSLv23_METHOD'
|
||||
- 'PROTOCOL_SSLv3' # strict option
|
||||
- 'PROTOCOL_TLSv1' # strict option
|
||||
- 'SSLv3_METHOD' # strict option
|
||||
- 'TLSv1_METHOD' # strict option
|
||||
|
||||
password_config_option_not_marked_secret:
|
||||
function_names:
|
||||
- oslo.config.cfg.StrOpt
|
||||
- oslo_config.cfg.StrOpt
|
||||
|
||||
execute_with_run_as_root_equals_true:
|
||||
function_names:
|
||||
- ceilometer.utils.execute
|
||||
- cinder.utils.execute
|
||||
- neutron.agent.linux.utils.execute
|
||||
- nova.utils.execute
|
||||
- nova.utils.trycmd
|
||||
|
||||
try_except_pass:
|
||||
check_typed_exception: True
|
||||
@@ -1,24 +0,0 @@
|
||||
====================
|
||||
Enabling in Devstack
|
||||
====================
|
||||
|
||||
1. Download DevStack::
|
||||
|
||||
git clone https://github.com/openstack-dev/devstack.git
|
||||
cd devstack
|
||||
|
||||
2. Add this repo as an external repository::
|
||||
|
||||
> cat local.conf
|
||||
[[local|localrc]]
|
||||
enable_plugin glare https://github.com/openstack/glare
|
||||
|
||||
.. note::
|
||||
To enable installation of glare client from git repo instead of pypi execute
|
||||
a shell command:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
export LIBS_FROM_GIT+=python-glareclient
|
||||
|
||||
3. run ``stack.sh``
|
||||
@@ -1,218 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# Plugin file for Glare services
|
||||
# -------------------------------
|
||||
|
||||
# Dependencies:
|
||||
# ``functions`` file
|
||||
# ``DEST``, ``DATA_DIR``, ``STACK_USER`` must be defined
|
||||
|
||||
# Save trace setting
|
||||
XTRACE=$(set +o | grep xtrace)
|
||||
set -o xtrace
|
||||
|
||||
echo_summary "glare's plugin.sh was called..."
|
||||
# create_glare_accounts() - Set up common required glare accounts
|
||||
#
|
||||
# Tenant User Roles
|
||||
# ------------------------------
|
||||
# service glare admin
|
||||
function create_glare_accounts() {
|
||||
create_service_user "glare"
|
||||
|
||||
# required for swift access
|
||||
if is_service_enabled s-proxy; then
|
||||
create_service_user "glare-swift" "ResellerAdmin"
|
||||
fi
|
||||
|
||||
get_or_create_service "glare" "artifact" "Artifact repository"
|
||||
get_or_create_endpoint "artifact" \
|
||||
"$REGION_NAME" \
|
||||
"$GLARE_SERVICE_PROTOCOL://$GLARE_SERVICE_HOST:$GLARE_SERVICE_PORT" \
|
||||
"$GLARE_SERVICE_PROTOCOL://$GLARE_SERVICE_HOST:$GLARE_SERVICE_PORT" \
|
||||
"$GLARE_SERVICE_PROTOCOL://$GLARE_SERVICE_HOST:$GLARE_SERVICE_PORT"
|
||||
}
|
||||
|
||||
|
||||
function mkdir_chown_stack {
|
||||
if [[ ! -d "$1" ]]; then
|
||||
sudo mkdir -p "$1"
|
||||
fi
|
||||
sudo chown $STACK_USER "$1"
|
||||
}
|
||||
|
||||
|
||||
function configure_glare {
|
||||
|
||||
# create and clean up auth cache dir
|
||||
mkdir_chown_stack "$GLARE_AUTH_CACHE_DIR"
|
||||
rm -f "$GLARE_AUTH_CACHE_DIR"/*
|
||||
|
||||
mkdir_chown_stack "$GLARE_CONF_DIR"
|
||||
|
||||
# Generate Glare configuration file and configure common parameters.
|
||||
oslo-config-generator --config-file $GLARE_DIR/etc/oslo-config-generator/glare.conf --output-file $GLARE_CONF_FILE
|
||||
|
||||
# Glare Configuration
|
||||
#-------------------------
|
||||
|
||||
iniset $GLARE_CONF_FILE DEFAULT debug $GLARE_DEBUG
|
||||
|
||||
# Specify additional modules with external artifact types
|
||||
if [ -n "$GLARE_CUSTOM_MODULES" ]; then
|
||||
iniset $GLARE_CONF_FILE DEFAULT custom_artifact_types_modules $GLARE_CUSTOM_MODULES
|
||||
fi
|
||||
|
||||
# Specify a list of enabled artifact types
|
||||
if [ -n "$GLARE_ENABLED_TYPES" ]; then
|
||||
iniset $GLARE_CONF_FILE DEFAULT enabled_artifact_types $GLARE_ENABLED_TYPES
|
||||
fi
|
||||
|
||||
oslopolicy-sample-generator --namespace=glare --output-file=$GLARE_POLICY_FILE
|
||||
sed -i 's/^#"//' $GLARE_POLICY_FILE
|
||||
|
||||
cp -p $GLARE_DIR/etc/glare-paste.ini $GLARE_CONF_DIR
|
||||
|
||||
iniset $GLARE_CONF_FILE paste_deploy flavor $GLARE_FLAVOR
|
||||
|
||||
# Setup keystone_authtoken section
|
||||
configure_auth_token_middleware $GLARE_CONF_FILE glare $GLARE_AUTH_CACHE_DIR
|
||||
|
||||
# Setup RabbitMQ credentials
|
||||
iniset $GLARE_CONF_FILE oslo_messaging_rabbit rabbit_userid $RABBIT_USERID
|
||||
iniset $GLARE_CONF_FILE oslo_messaging_rabbit rabbit_password $RABBIT_PASSWORD
|
||||
|
||||
# Enable notifications support
|
||||
iniset $GLARE_CONF_FILE oslo_messaging_notifications driver messaging
|
||||
|
||||
# Configure the database.
|
||||
iniset $GLARE_CONF_FILE database connection `database_connection_url glare`
|
||||
iniset $GLARE_CONF_FILE database max_overflow -1
|
||||
iniset $GLARE_CONF_FILE database max_pool_size 1000
|
||||
|
||||
# Path of policy.yaml file.
|
||||
iniset $GLARE_CONF_FILE oslo_policy policy_file $GLARE_POLICY_FILE
|
||||
|
||||
if [ "$LOG_COLOR" == "True" ] && [ "$SYSLOG" == "False" ]; then
|
||||
setup_colorized_logging $GLARE_CONF_FILE DEFAULT tenant user
|
||||
fi
|
||||
|
||||
if [ "$GLARE_RPC_IMPLEMENTATION" ]; then
|
||||
iniset $GLARE_CONF_FILE DEFAULT rpc_implementation $GLARE_RPC_IMPLEMENTATION
|
||||
fi
|
||||
|
||||
# Configuring storage
|
||||
iniset $GLARE_CONF_FILE glance_store filesystem_store_datadir $GLARE_ARTIFACTS_DIR
|
||||
|
||||
# Store the artifacts in swift if enabled.
|
||||
if is_service_enabled s-proxy; then
|
||||
GLARE_SWIFT_STORE_CONF=$GLARE_CONF_DIR/glare-swift-store.conf
|
||||
cp -p $GLARE_DIR/etc/glare-swift.conf.sample $GLARE_CONF_DIR
|
||||
|
||||
iniset $GLARE_CONF_FILE glance_store default_store swift
|
||||
iniset $GLARE_CONF_FILE glance_store swift_store_create_container_on_put True
|
||||
|
||||
iniset $GLARE_CONF_FILE glance_store swift_store_config_file $GLARE_SWIFT_STORE_CONF
|
||||
iniset $GLARE_CONF_FILE glance_store default_swift_reference ref1
|
||||
iniset $GLARE_CONF_FILE glance_store stores "file, http, swift"
|
||||
|
||||
iniset $GLARE_SWIFT_STORE_CONF ref1 user $SERVICE_PROJECT_NAME:glare-swift
|
||||
|
||||
iniset $GLARE_SWIFT_STORE_CONF ref1 key $SERVICE_PASSWORD
|
||||
iniset $GLARE_SWIFT_STORE_CONF ref1 auth_address $KEYSTONE_SERVICE_URI/v3
|
||||
iniset $GLARE_SWIFT_STORE_CONF ref1 user_domain_name $SERVICE_DOMAIN_NAME
|
||||
iniset $GLARE_SWIFT_STORE_CONF ref1 project_domain_name $SERVICE_DOMAIN_NAME
|
||||
iniset $GLARE_SWIFT_STORE_CONF ref1 auth_version 3
|
||||
|
||||
# commenting is not strictly necessary but it's confusing to have bad values in conf
|
||||
inicomment $GLARE_CONF_FILE glance_store swift_store_user
|
||||
inicomment $GLARE_CONF_FILE glance_store swift_store_key
|
||||
inicomment $GLARE_CONF_FILE glance_store swift_store_auth_address
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
# init_glare - Initialize the database
|
||||
function init_glare {
|
||||
# Delete existing artifacts
|
||||
rm -rf $GLARE_ARTIFACTS_DIR
|
||||
mkdir -p $GLARE_ARTIFACTS_DIR
|
||||
|
||||
# (re)create Glare database
|
||||
recreate_database glare utf8
|
||||
|
||||
# Migrate glare database
|
||||
$GLARE_BIN_DIR/glare-db-manage --config-file $GLARE_CONF_FILE upgrade
|
||||
}
|
||||
|
||||
|
||||
# install_glare - Collect source and prepare
|
||||
function install_glare {
|
||||
setup_develop $GLARE_DIR
|
||||
}
|
||||
|
||||
|
||||
function install_glare_pythonclient {
|
||||
if use_library_from_git "python-glareclient"; then
|
||||
git_clone $GLARE_PYTHONCLIENT_REPO $GLARE_PYTHONCLIENT_DIR $GLARE_PYTHONCLIENT_BRANCH
|
||||
setup_develop $GLARE_PYTHONCLIENT_DIR
|
||||
else
|
||||
# nothing actually "requires" glareclient, so force installation from pypi
|
||||
pip_install_gr python-glareclient
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
# start_glare - Start running processes, including screen
|
||||
function start_glare {
|
||||
run_process glare "$GLARE_BIN_DIR/glare-api --config-file $GLARE_CONF_DIR/glare.conf"
|
||||
}
|
||||
|
||||
|
||||
# stop_glare - Stop running processes
|
||||
function stop_glare {
|
||||
# Kill the Glare screen windows
|
||||
for serv in glare-api; do
|
||||
stop_process $serv
|
||||
done
|
||||
}
|
||||
|
||||
|
||||
function cleanup_glare {
|
||||
sudo rm -rf $GLARE_ARTIFACTS_DIR $GLARE_AUTH_CACHE_DIR
|
||||
}
|
||||
|
||||
|
||||
if is_service_enabled glare; then
|
||||
if [[ "$1" == "stack" && "$2" == "install" ]]; then
|
||||
echo_summary "Installing glare"
|
||||
install_glare
|
||||
install_glare_pythonclient
|
||||
elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then
|
||||
echo_summary "Configuring glare"
|
||||
create_glare_accounts
|
||||
configure_glare
|
||||
elif [[ "$1" == "stack" && "$2" == "extra" ]]; then
|
||||
echo_summary "Initializing glare"
|
||||
init_glare
|
||||
echo_summary "Starting Glare process"
|
||||
start_glare
|
||||
fi
|
||||
|
||||
if [[ "$1" == "unstack" ]]; then
|
||||
echo_summary "Shutting down glare"
|
||||
stop_glare
|
||||
fi
|
||||
|
||||
if [[ "$1" == "clean" ]]; then
|
||||
echo_summary "Cleaning glare"
|
||||
cleanup_glare
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
# Restore xtrace
|
||||
$XTRACE
|
||||
|
||||
# Local variables:
|
||||
# mode: shell-script
|
||||
# End:
|
||||
@@ -1,44 +0,0 @@
|
||||
# Devstack settings
|
||||
|
||||
enable_service glare
|
||||
|
||||
# Set up default directories
|
||||
GLARE_PYTHONCLIENT_REPO=${GLARE_PYTHONCLIENT_REPO:-${GIT_BASE}/openstack/python-glareclient.git}
|
||||
GLARE_PYTHONCLIENT_BRANCH=${GLARE_PYTHONCLIENT_BRANCH:-master}
|
||||
GLARE_PYTHONCLIENT_DIR=$DEST/python-glareclient
|
||||
|
||||
GLARE_DIR=$DEST/glare
|
||||
GLARE_REPO=${GLARE_REPO:-${GIT_BASE}/openstack/glare.git}
|
||||
GLARE_BRANCH=${GLARE_BRANCH:-master}
|
||||
|
||||
# Glare virtual environment
|
||||
if [[ ${USE_VENV} = True ]]; then
|
||||
PROJECT_VENV["glare"]=${GLARE_DIR}.venv
|
||||
GLARE_BIN_DIR=${PROJECT_VENV["glare"]}/bin
|
||||
else
|
||||
GLARE_BIN_DIR=$(get_python_exec_prefix)
|
||||
fi
|
||||
|
||||
GLARE_ARTIFACTS_DIR=${GLARE_ARTIFACTS_DIR:=$DATA_DIR/glare/artifacts}
|
||||
GLARE_AUTH_CACHE_DIR=${GLARE_AUTH_CACHE_DIR:-/var/cache/glare}
|
||||
|
||||
GLARE_CONF_DIR=${GLARE_CONF_DIR:-/etc/glare}
|
||||
GLARE_CONF_FILE=$GLARE_CONF_DIR/glare.conf
|
||||
GLARE_PASTE_INI=$GLARE_CONF_DIR/glare-paste.ini
|
||||
GLARE_POLICY_FILE=$GLARE_CONF_DIR/policy.yaml
|
||||
GLARE_SWIFT_STORE_CONF=$GLARE_CONF_DIR/glare-swift-store.conf
|
||||
|
||||
if is_ssl_enabled_service "glare" || is_service_enabled tls-proxy; then
|
||||
GLARE_SERVICE_PROTOCOL="https"
|
||||
fi
|
||||
|
||||
# Glare connection info. Note the port must be specified.
|
||||
GLARE_SERVICE_PORT=${GLARE_SERVICE_PORT:-9494}
|
||||
GLARE_SERVICE_HOST=${GLARE_SERVICE_HOST:-$SERVICE_HOST}
|
||||
GLARE_SERVICE_PROTOCOL=${GLARE_SERVICE_PROTOCOL:-$SERVICE_PROTOCOL}
|
||||
|
||||
GLARE_DEBUG=${GLARE_DEBUG:-True}
|
||||
|
||||
GLARE_ADMIN_USER=${GLARE_ADMIN_USER:-glare}
|
||||
|
||||
GLARE_FLAVOR=${GLARE_FLAVOR:-keystone}
|
||||
@@ -1,11 +0,0 @@
|
||||
<h3>Useful Links</h3>
|
||||
<ul>
|
||||
<li><a href="https://launchpad.net/glare">Glare @ Launchpad</a></li>
|
||||
<li><a href="https://wiki.openstack.org/wiki/glare">Glare @ OpenStack Wiki</a></li>
|
||||
</ul>
|
||||
|
||||
{% if READTHEDOCS %}
|
||||
<script type='text/javascript'>
|
||||
$('div.body').css('margin', 0)
|
||||
</script>
|
||||
{% endif %}
|
||||
@@ -1,4 +0,0 @@
|
||||
{% extends "basic/layout.html" %}
|
||||
{% set css_files = css_files + ['_static/tweaks.css'] %}
|
||||
|
||||
{% block relbar1 %}{% endblock relbar1 %}
|
||||
@@ -1,4 +0,0 @@
|
||||
[theme]
|
||||
inherit = nature
|
||||
stylesheet = nature.css
|
||||
pygments_style = tango
|
||||
@@ -1,93 +0,0 @@
|
||||
..
|
||||
Copyright 2017 - Nokia Networks
|
||||
All Rights Reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
not use this file except in compliance with the License. You may obtain
|
||||
a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
License for the specific language governing permissions and limitations
|
||||
under the License.
|
||||
|
||||
==================
|
||||
Basic architecture
|
||||
==================
|
||||
|
||||
OpenStack Glare has a client-server architecture that provides a Unified REST API,
|
||||
which then transfers control to the appropriate artifact type. The API consists of
|
||||
* *Router*, that converts WSGI requests into appropriate Glare API methods;
|
||||
* *Deserializer*, that parses parameters from user input and performs initial
|
||||
validation checks;
|
||||
* *Controller*, which is responsible for interactions with Glare Engine;
|
||||
* *Serializer*, that prepares information for responses (inserts status code,
|
||||
content-type, response content length, and so on).
|
||||
|
||||
But before the requests reach the API they have to pass trough the set of
|
||||
middlewares, and each performs some actions over Request or Response objects.
|
||||
For example, *Auth* middleware checks that authentication token provided in request
|
||||
header is valid by sending auth requests to Identity service, obtains user
|
||||
information and injects it in Request object as a context objects;
|
||||
*Fault middleware*, on the other hand, is responsible for converting inner
|
||||
Glare exceptions to appropriate http error codes.
|
||||
|
||||
Almost all business logic is provided by Glare *Engine*. It is responsible
|
||||
for *Policy* checking, when operator may define what operations users may execute,
|
||||
based on their contexts; for sending broadcast *Notifications* about performed
|
||||
actions; then it is *Access Control*, when Engine checks if user has rights to
|
||||
modify desired artifact; and finally – *Locking*, that is used to prevent race
|
||||
conditions during artifact updates, when the artifact is locked until the
|
||||
modification operation is finished.
|
||||
|
||||
All the file (Blob data) operations are performed using
|
||||
*glance_store* library, which is responsible for interaction with external
|
||||
storage back ends and (or) local filesystem(s). The glance_store library
|
||||
provides a uniform interface to access the backend stores. Also there is
|
||||
an adapter layer *Store Manager* between Engine and glance_store that is
|
||||
responsible for converting glance_store exceptions and adding some additional
|
||||
logic, like sha256 calculation.
|
||||
|
||||
All database operations are organized with artifact types. Each type installed
|
||||
in the system must implement Glare Artifact Type Interface (GATI) and use
|
||||
appropriate data types to describe its attributes.
|
||||
|
||||
Glare uses several data types from a declarative framework *oslo.versionedobjects*:
|
||||
Integer, Float, String, Boolean, which complemented with the following home-grown
|
||||
data types:
|
||||
* Version — specifies the version of the artifact in ‘SemVer’ format and
|
||||
implements comparison operations.
|
||||
* Dependency — sets a reference to another artifact. At the request of the
|
||||
‘dependency’ field, Glare will get the dependent artifact meta-information.
|
||||
* Blob — specifies a binary object. When a user assigns a value to this field,
|
||||
data will be automatically redirected to one of the connected storages.
|
||||
* List and Dict — define complex data structures such as Lists and Dictionaries
|
||||
of primitive types respectively.
|
||||
|
||||
*Base artifact type* is an abstract class that has a reference implementation
|
||||
of GATI. It contains only common fields, like "id", "name", "version",
|
||||
"created_at”, "owner", and so on.
|
||||
|
||||
Each artifact type is inherited from the Base and adds some additional fields.
|
||||
For example, for Image artifact type there were added "container_format" and
|
||||
"disk_format" string fields, for Heat Template it was "nested_templates" Blob
|
||||
Dictionary.
|
||||
|
||||
*Validators* are objects that can be attached to a filed to perform additional
|
||||
checks. For example, if validator MinLen(1) is attached to a string field it
|
||||
checks that the string value is non empty. Validator ForbiddenChars("/", ",")
|
||||
validates that there shouldn't be slashes and commas in the string.
|
||||
|
||||
Glare uses a central *Database* that is shared amongst all
|
||||
the components in the system and is sql-based by default. Other types
|
||||
of database backends are somewhat supported and used by operators
|
||||
but are not extensively tested upstream.
|
||||
|
||||
.. figure:: ./images/glare-architecture.png
|
||||
:figwidth: 100%
|
||||
:align: center
|
||||
|
||||
.. centered:: Image 1. OpenStack Glare Architecture
|
||||
@@ -1,257 +0,0 @@
|
||||
# Copyright (c) 2010 OpenStack Foundation.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
#
|
||||
# Glare documentation build configuration file, created by
|
||||
# sphinx-quickstart on Tue May 18 13:50:15 2010.
|
||||
#
|
||||
# This file is execfile()'d with the current directory set to its containing
|
||||
# dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
sys.path = [
|
||||
os.path.abspath('../..'),
|
||||
os.path.abspath('../../bin')
|
||||
] + sys.path
|
||||
|
||||
# -- General configuration ---------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = ['sphinx.ext.coverage',
|
||||
'sphinx.ext.ifconfig',
|
||||
'sphinx.ext.graphviz',
|
||||
'oslosphinx',
|
||||
'stevedore.sphinxext',
|
||||
'oslo_config.sphinxext',
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.viewcode',
|
||||
'oslo_config.sphinxconfiggen',
|
||||
]
|
||||
|
||||
config_generator_config_file = [
|
||||
('../../etc/oslo-config-generator/glare.conf',
|
||||
'_static/glare'),
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
# templates_path = []
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'Glare'
|
||||
copyright = u'2016-present, OpenStack Foundation.'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
from glare.version import version_info as glare_version
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = glare_version.version_string_with_vcs()
|
||||
# The short X.Y version.
|
||||
version = glare_version.canonical_version_string()
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of documents that shouldn't be included in the build.
|
||||
#unused_docs = []
|
||||
|
||||
# List of directories, relative to source directory, that shouldn't be searched
|
||||
# for source files.
|
||||
#exclude_trees = ['api']
|
||||
exclude_patterns = [
|
||||
# The man directory includes some snippet files that are included
|
||||
# in other documents during the build but that should not be
|
||||
# included in the toctree themselves, so tell Sphinx to ignore
|
||||
# them when scanning for input files.
|
||||
'man/footer.rst',
|
||||
'man/general_options.rst',
|
||||
'man/openstack_options.rst',
|
||||
]
|
||||
|
||||
# The reST default role (for this markup: `text`) to use for all documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
show_authors = True
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
modindex_common_prefix = ['glare.']
|
||||
|
||||
# -- Options for man page output --------------------------------------------
|
||||
|
||||
# Grouping the document tree for man pages.
|
||||
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
|
||||
|
||||
man_pages = []
|
||||
|
||||
|
||||
# -- Options for HTML output -------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
||||
# Sphinx are currently 'default' and 'sphinxdoc'.
|
||||
# html_theme_path = ["."]
|
||||
# html_theme = '_theme'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = ['_theme']
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
html_title = 'Glare'
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local",
|
||||
"-n1"]
|
||||
try:
|
||||
html_last_updated_fmt = subprocess.check_output(git_cmd).decode('utf-8')
|
||||
except Exception:
|
||||
warnings.warn('Cannot get last updated time from git repository. '
|
||||
'Not setting "html_last_updated_fmt".')
|
||||
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
html_use_modindex = True
|
||||
|
||||
# If false, no index is generated.
|
||||
html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = ''
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'glareedoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output ------------------------------------------------
|
||||
|
||||
# The paper size ('letter' or 'a4').
|
||||
#latex_paper_size = 'letter'
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#latex_font_size = '10pt'
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# documentclass [howto/manual]).
|
||||
#latex_documents = [
|
||||
# ('index', 'Glare.tex', u'Glare Documentation',
|
||||
# u'Glare Team', 'manual'),
|
||||
#]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#latex_preamble = ''
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_use_modindex = True
|
||||
@@ -1,174 +0,0 @@
|
||||
How to create new Artifact Type
|
||||
===============================
|
||||
|
||||
Basics
|
||||
------
|
||||
|
||||
Each artifact type must realize **Glare Artifact Type Interface** (GATI)
|
||||
and be inherited from ``glare.objects.base.BaseArtifact`` class.
|
||||
GATI obliges to specify only one class method – ``get_type_name``
|
||||
that returns a string with unique artifact type name. Other methods
|
||||
and fields are optional.
|
||||
|
||||
.. note::
|
||||
|
||||
Conventionally it is recommended to give names in the plural, in
|
||||
lowercase, with words separated by underscores.
|
||||
|
||||
Example of code for minimal artifact type:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from glare.objects import base
|
||||
|
||||
class HelloWorld(base.BaseArtifact):
|
||||
@classmethod
|
||||
def get_type_name(cls):
|
||||
return "hello_worlds"
|
||||
|
||||
Custom artifact fields
|
||||
----------------------
|
||||
|
||||
Users can add type specific fields to their artifact type to extend
|
||||
its logic and functionality. Follow the requirements of
|
||||
oslo.versionedobjects library all new fields must be placed in class
|
||||
dictionary attribute called ``fields``:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from glare.objects import base
|
||||
|
||||
class HelloWorld(base.BaseArtifact):
|
||||
...
|
||||
fields = {...}
|
||||
|
||||
There is a large number of possible field options. Let’s look at the
|
||||
most popular ones.
|
||||
|
||||
Fields of primitive types
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Users are allowed to create additional fields of 5 primitive types:
|
||||
* IntegerField
|
||||
* FloatField
|
||||
* FlexibleBooleanField
|
||||
* StringField
|
||||
* Link
|
||||
|
||||
First four are taken from oslo.versionedobjects directly, Link is a
|
||||
glare-specific field which stores links in specific format to other
|
||||
artifacts in the system.
|
||||
|
||||
.. note::
|
||||
|
||||
It’s recommended to use FlexibleBoolean field instead of just
|
||||
Boolean, because it has more sophisticated coercing. For instance,
|
||||
it accepts string parameters like “true”, “yes”, “1” and so on,
|
||||
and successfully coerces it to boolean value True.
|
||||
|
||||
Users can create their own fields with method ``init`` from Attribute class.
|
||||
This method’s first parameter must be an appropriate field class, other
|
||||
parameters are optional and will be discussed later. In next example we
|
||||
will create 5 new custom fields, one for each primitive type:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from oslo_versionedobjects import fields
|
||||
|
||||
from glare.objects import base
|
||||
from glare.objects.meta import wrappers
|
||||
from glare.objects.meta import fields as glare_fields
|
||||
|
||||
Field = wrappers.Field.init
|
||||
|
||||
class HelloWorld(base.BaseArtifact):
|
||||
@classmethod
|
||||
def get_type_name(cls):
|
||||
return "hello_worlds"
|
||||
|
||||
fields = {
|
||||
'my_int': Field(fields.IntegerField),
|
||||
'my_float': Field(fields.FloatField),
|
||||
'my_bool': Field(fields.FlexibleBooleanField),
|
||||
'my_string': Field(fields.StringField),
|
||||
'my_link': Field(glare_fields.Link)
|
||||
}
|
||||
|
||||
Compound types
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
There are two collections, that may contain fields of primitive types:
|
||||
*List* and *Dict*. Fields of compound types are created with method ``init``
|
||||
of classes ListAttribute and DictAttribute respectively.
|
||||
Unlike Attribute class’ ``init``, this method takes field type class as
|
||||
a first parameter, but not just field class. So, *IntegerField* must be changed
|
||||
to *Integer*, *FloatField* to *Float*, and so on. Finally for collection of
|
||||
links user should use *LinkType*. Let’s add several new compound fields to
|
||||
*HelloWorld* class.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from oslo_versionedobjects import fields
|
||||
|
||||
from glare.objects import base
|
||||
from glare.objects.meta import wrappers
|
||||
from glare.objects.meta import fields as glare_fields
|
||||
|
||||
Field = wrappers.Field.init
|
||||
Dict = wrappers.DictField.init
|
||||
List = wrappers.ListField.init
|
||||
|
||||
class HelloWorld(base.BaseArtifact):
|
||||
@classmethod
|
||||
def get_type_name(cls):
|
||||
return "hello_worlds"
|
||||
|
||||
fields = {
|
||||
...
|
||||
'my_list_of_str': List(fields.String),
|
||||
'my_dict_of_int': Dict(fields.Integer),
|
||||
'my_list_of_float': List(fields.Float),
|
||||
'my_dict_of_bools': Dict(fields.FlexibleBoolean),
|
||||
'my_list_of_links': List(glare_fields.LinkType)
|
||||
}
|
||||
|
||||
Other parameters, like collection max size, possible item values,
|
||||
and so on, also can be specified with additional parameters to ``init``
|
||||
method. They will be discussed later.
|
||||
|
||||
Blob and Folder types
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The most interesting fields in glare framework are *Blob* and
|
||||
*Folder* (or *BlobDict*). These fields allow users to work binary data,
|
||||
which is stored in a standalone cloud storage, like Swift or Ceph.
|
||||
The difference between Blob and Folder is that Blob sets unique endpoint
|
||||
and may contain only one binary object, on the other hand Folder may
|
||||
contain lots of binaries with names specified by user.
|
||||
|
||||
Example of Blob and Folder fields:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from oslo_versionedobjects import fields
|
||||
|
||||
from glare.objects import base
|
||||
from glare.objects.meta import wrappers
|
||||
from glare.objects.meta import fields as glare_fields
|
||||
|
||||
Field = wrappers.Field.init
|
||||
Dict = wrappers.DictField.init
|
||||
List = wrappers.ListField.init
|
||||
Blob = wrappers.BlobField.init
|
||||
Folder = wrappers.FolderField.init
|
||||
|
||||
class HelloWorld(base.BaseArtifact):
|
||||
@classmethod
|
||||
def get_type_name(cls):
|
||||
return "hello_worlds"
|
||||
|
||||
fields = {
|
||||
...
|
||||
'my_blob': Blob(),
|
||||
'my_folder': Folder(),
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
Glare Devstack Installation
|
||||
===========================
|
||||
|
||||
TBD
|
||||
@@ -1,10 +0,0 @@
|
||||
Developer's Reference
|
||||
=====================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 3
|
||||
|
||||
webapi/index
|
||||
creating_custom_artifact_type
|
||||
devstack
|
||||
troubleshooting
|
||||
@@ -1,4 +0,0 @@
|
||||
Troubleshooting And Debugging
|
||||
=============================
|
||||
|
||||
TBD
|
||||
@@ -1,7 +0,0 @@
|
||||
REST API Specification
|
||||
======================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
v1
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,4 +0,0 @@
|
||||
Glare Configuration Guide
|
||||
=========================
|
||||
|
||||
TBD
|
||||
@@ -1,4 +0,0 @@
|
||||
Glare Dashboard Installation Guide
|
||||
==================================
|
||||
|
||||
TBD
|
||||
@@ -1,85 +0,0 @@
|
||||
Glare Client Installation Guide
|
||||
===============================
|
||||
|
||||
To install ``python-glareclient``, it is required to have ``pip``
|
||||
(in most cases). Make sure that ``pip`` is installed. Then type::
|
||||
|
||||
$ pip install python-glareclient
|
||||
|
||||
Or, if it is needed to install ``python-glareclient`` from master branch,
|
||||
type::
|
||||
|
||||
$ pip install git+https://github.com/openstack/python-glareclient.git
|
||||
|
||||
After ``python-glareclient`` is installed you will see command ``glare``
|
||||
in your environment.
|
||||
|
||||
Glare client also provides a plugin ``openstack artifact`` to OpenStack client.
|
||||
If glare client is supposed to be used with OpenStack cloud then additionally
|
||||
``python-openstackclient`` has to be installed::
|
||||
|
||||
$ pip install python-openstackclient
|
||||
|
||||
|
||||
Configure authentication against Keystone
|
||||
-----------------------------------------
|
||||
|
||||
If Keystone is used for authentication in Glare, then the interraction has to
|
||||
be organized with openstackclient plugin ``openstack artifact`` and the
|
||||
environment should have auth variables::
|
||||
|
||||
$ export OS_AUTH_URL=http://<Keystone_host>:5000/v3
|
||||
$ export OS_TENANT_NAME=tenant
|
||||
$ export OS_USERNAME=admin
|
||||
$ export OS_PASSWORD=secret
|
||||
$ export OS_GLARE_URL=http://<Glare host>:9494 (optional, by default URL=http://localhost:9494/)
|
||||
|
||||
And in the case when you are authenticating against keystone over https::
|
||||
|
||||
$ export OS_CACERT=<path_to_ca_cert>
|
||||
|
||||
.. note:: In client, we can use both Keystone auth versions - v2.0 and v3. But server supports only v3.
|
||||
|
||||
You can see the list of available commands by typing::
|
||||
|
||||
$ openstack artifact --help
|
||||
|
||||
To make sure Glare client works, type::
|
||||
|
||||
$ openstack artifact type-list
|
||||
|
||||
Configure authentication against Keycloak
|
||||
-----------------------------------------
|
||||
|
||||
Glare also supports authentication against Keycloak server via OpenID Connect protocol.
|
||||
In this case ``glare`` command must be used.
|
||||
In order to use it on the client side the environment should look as follows::
|
||||
|
||||
$ export KEYCLOAK_AUTH_URL=https://<Keycloak-server-host>:<Keycloak-server-port>/auth
|
||||
$ export KEYCLOAK_REALM_NAME=my_keycloak_realm
|
||||
$ export KEYCLOAK_USERNAME=admin
|
||||
$ export KEYCLOAK_PASSWORD=secret
|
||||
$ export OPENID_CLIENT_ID=my_keycloak_client
|
||||
$ export OS_GLARE_URL=http://<GLARE host>:9494 (optional, by default URL=http://localhost:9494)
|
||||
|
||||
.. note:: If KEYCLOAK_AUTH_URL is set then authentication against KeyCloak will be used
|
||||
|
||||
You can see the list of available commands by typing::
|
||||
|
||||
$ glare --help
|
||||
|
||||
To make sure Glare client works, type::
|
||||
|
||||
$ glare type-list
|
||||
|
||||
Send tokens directly without authentication
|
||||
-------------------------------------------
|
||||
|
||||
Glare has a possibility to send tokens directly.
|
||||
In order to use it on the client side the environment should look as follows::
|
||||
|
||||
$ export OS_GLARE_URL=http://<GLARE host>:9494 (optional, by default URL=http://localhost:9494)
|
||||
$ export AUTH_TOKEN=secret_token
|
||||
|
||||
.. note:: It's more convenient to specify token as a command parameter in format ``--auth-token``,
|
||||
for example, ``glare --auth-token secret_token type-list``
|
||||
@@ -1,4 +0,0 @@
|
||||
Custom Actions Hooks Guide
|
||||
==========================
|
||||
|
||||
TBD
|
||||
@@ -1,4 +0,0 @@
|
||||
Glare Installation Guide
|
||||
========================
|
||||
|
||||
TBD
|
||||
@@ -1,7 +0,0 @@
|
||||
Glare Upgrade Guide
|
||||
===================
|
||||
|
||||
Database Upgrade
|
||||
----------------
|
||||
|
||||
TBD
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 40 KiB |
@@ -1,876 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<graphml xmlns="http://graphml.graphdrawing.org/xmlns" xmlns:java="http://www.yworks.com/xml/yfiles-common/1.0/java" xmlns:sys="http://www.yworks.com/xml/yfiles-common/markup/primitives/2.0" xmlns:x="http://www.yworks.com/xml/yfiles-common/markup/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:y="http://www.yworks.com/xml/graphml" xmlns:yed="http://www.yworks.com/xml/yed/3" xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd">
|
||||
<!--Created by yEd 3.16.2.1-->
|
||||
<key attr.name="Description" attr.type="string" for="graph" id="d0"/>
|
||||
<key for="port" id="d1" yfiles.type="portgraphics"/>
|
||||
<key for="port" id="d2" yfiles.type="portgeometry"/>
|
||||
<key for="port" id="d3" yfiles.type="portuserdata"/>
|
||||
<key attr.name="url" attr.type="string" for="node" id="d4"/>
|
||||
<key attr.name="description" attr.type="string" for="node" id="d5"/>
|
||||
<key for="node" id="d6" yfiles.type="nodegraphics"/>
|
||||
<key for="graphml" id="d7" yfiles.type="resources"/>
|
||||
<key attr.name="url" attr.type="string" for="edge" id="d8"/>
|
||||
<key attr.name="description" attr.type="string" for="edge" id="d9"/>
|
||||
<key for="edge" id="d10" yfiles.type="edgegraphics"/>
|
||||
<graph edgedefault="directed" id="G">
|
||||
<data key="d0"/>
|
||||
<node id="n0" yfiles.foldertype="group">
|
||||
<data key="d4"/>
|
||||
<data key="d5"/>
|
||||
<data key="d6">
|
||||
<y:ProxyAutoBoundsNode>
|
||||
<y:Realizers active="0">
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="756.267578125" width="597.0" x="277.0" y="23.078125"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="597.0" x="0.0" y="0.0">Glare Service</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" y="0.0">Folder 6</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
</y:Realizers>
|
||||
</y:ProxyAutoBoundsNode>
|
||||
</data>
|
||||
<graph edgedefault="directed" id="n0:">
|
||||
<node id="n0::n0" yfiles.foldertype="group">
|
||||
<data key="d4"/>
|
||||
<data key="d6">
|
||||
<y:ProxyAutoBoundsNode>
|
||||
<y:Realizers active="0">
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="130.4609375" width="175.0" x="457.0" y="59.5390625"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="175.0" x="0.0" y="0.0">Middlewares</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" y="0.0">Folder 1</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
</y:Realizers>
|
||||
</y:ProxyAutoBoundsNode>
|
||||
</data>
|
||||
<graph edgedefault="directed" id="n0::n0:">
|
||||
<node id="n0::n0::n0">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="145.0" x="472.0" y="96.0"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="105.0625" x="19.96875" y="6.015625">Auth Middleware<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n0::n1">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="145.0" x="472.0" y="145.0"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="106.837890625" x="19.0810546875" y="6.015625">Fault Middleware<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
</graph>
|
||||
</node>
|
||||
<node id="n0::n1" yfiles.foldertype="group">
|
||||
<data key="d4"/>
|
||||
<data key="d6">
|
||||
<y:ProxyAutoBoundsNode>
|
||||
<y:Realizers active="0">
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="201.4609375" width="369.0" x="360.0" y="211.5390625"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="369.0" x="0.0" y="0.0">API</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="22" leftF="22.0" right="17" rightF="17.0" top="6" topF="6.0"/>
|
||||
</y:GroupNode>
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" y="0.0">Folder 2</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
</y:Realizers>
|
||||
</y:ProxyAutoBoundsNode>
|
||||
</data>
|
||||
<graph edgedefault="directed" id="n0::n1:">
|
||||
<node id="n0::n1::n0">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="498.0" y="254.0"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="44.306640625" x="24.3466796875" y="6.015625">Router<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n1::n1">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="397.0" y="308.0"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="75.162109375" x="8.9189453125" y="6.015625">Deserializer<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n1::n2">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="498.0" y="368.0"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="63.291015625" x="14.8544921875" y="6.015625">Controller<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n1::n3">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="604.0" y="308.0"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="59.904296875" x="16.5478515625" y="6.015625">Serializer<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
</graph>
|
||||
</node>
|
||||
<node id="n0::n2">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="498.0" y="471.0"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="45.126953125" x="23.9365234375" y="6.015625">Engine<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n3" yfiles.foldertype="group">
|
||||
<data key="d4"/>
|
||||
<data key="d6">
|
||||
<y:ProxyAutoBoundsNode>
|
||||
<y:Realizers active="0">
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="231.4609375" width="123.0" x="736.0" y="361.654296875"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="123.0" x="0.0" y="0.0">Utils</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" y="0.0">Folder 3</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
</y:Realizers>
|
||||
</y:ProxyAutoBoundsNode>
|
||||
</data>
|
||||
<graph edgedefault="directed" id="n0::n3:">
|
||||
<node id="n0::n3::n0">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="751.0" y="398.115234375"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="89.546875" x="1.7265625" y="6.015625">PolicyEnforcer<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n3::n1">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="751.0" y="448.115234375"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="48.232421875" x="22.3837890625" y="6.015625">Notifier<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n3::n2">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="751.0" y="498.115234375"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="92.74609375" x="0.126953125" y="6.015625">Access Control<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n3::n3">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="751.0" y="548.115234375"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="50.130859375" x="21.4345703125" y="6.015625">Locking<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
</graph>
|
||||
</node>
|
||||
<node id="n0::n4" yfiles.foldertype="group">
|
||||
<data key="d4"/>
|
||||
<data key="d6">
|
||||
<y:ProxyAutoBoundsNode>
|
||||
<y:Realizers active="0">
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="141.4609375" width="153.0" x="315.0" y="434.5390625"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="153.0" x="0.0" y="0.0">Store</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" y="0.0">Folder 4</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
</y:Realizers>
|
||||
</y:ProxyAutoBoundsNode>
|
||||
</data>
|
||||
<graph edgedefault="directed" id="n0::n4:">
|
||||
<node id="n0::n4::n0">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="123.0" x="330.0" y="471.0"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="92.39453125" x="15.302734375" y="6.015625">Store Manager<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n4::n1">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="123.0" x="330.0" y="531.0"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="80.505859375" x="21.2470703125" y="6.015625">glance_store<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
</graph>
|
||||
</node>
|
||||
<node id="n0::n5" yfiles.foldertype="group">
|
||||
<data key="d4"/>
|
||||
<data key="d6">
|
||||
<y:ProxyAutoBoundsNode>
|
||||
<y:Realizers active="0">
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="156.23046875" width="373.0" x="413.0" y="608.115234375"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="373.0" x="0.0" y="0.0">Objects</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" y="0.0">Folder 5</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
</y:Realizers>
|
||||
</y:ProxyAutoBoundsNode>
|
||||
</data>
|
||||
<graph edgedefault="directed" id="n0::n5:">
|
||||
<node id="n0::n5::n0">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="678.0" y="644.576171875"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="38.822265625" x="27.0888671875" y="6.015625">Fields<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n5::n1">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="678.0" y="689.345703125"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="64.43359375" x="14.283203125" y="6.015625">Validators<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n5::n2">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="498.0" y="644.576171875"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="81.09765625" x="5.951171875" y="6.015625">Base Artifact<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n5::n3">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="93.0" x="428.0" y="719.345703125"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="47.833984375" x="22.5830078125" y="6.015625">Images<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n5::n4">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="101.0" x="551.0" y="719.345703125"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="99.326171875" x="0.8369140625" y="6.015625">Heat Templates<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
</graph>
|
||||
</node>
|
||||
<node id="n0::n6">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="101.0" x="292.0" y="644.576171875"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="43.591796875" x="28.7041015625" y="6.015625">DB api<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n0::n7">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="101.0" x="566.5" y="541.76953125"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="79.5625" x="10.71875" y="6.015625">oslo.vo Base<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
</graph>
|
||||
</node>
|
||||
<node id="n1" yfiles.foldertype="group">
|
||||
<data key="d4"/>
|
||||
<data key="d5"/>
|
||||
<data key="d6">
|
||||
<y:ProxyAutoBoundsNode>
|
||||
<y:Realizers active="0">
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="123.4609375" width="131.0" x="100.5" y="494.5390625"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="131.0" x="0.0" y="0.0">Cloud storages</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
<y:GroupNode>
|
||||
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
|
||||
<y:Fill color="#F5F5F5" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
|
||||
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" y="0.0">Folder 7</y:NodeLabel>
|
||||
<y:Shape type="roundrectangle"/>
|
||||
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
|
||||
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
|
||||
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
|
||||
</y:GroupNode>
|
||||
</y:Realizers>
|
||||
</y:ProxyAutoBoundsNode>
|
||||
</data>
|
||||
<graph edgedefault="directed" id="n1:">
|
||||
<node id="n1::n0">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="101.0" x="115.5" y="531.0"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="33.6953125" x="33.65234375" y="6.015625">Swift<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<node id="n1::n1">
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="101.0" x="115.5" y="573.0"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="34.984375" x="33.0078125" y="6.015625">Ceph<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
</graph>
|
||||
</node>
|
||||
<node id="n2">
|
||||
<data key="d5"/>
|
||||
<data key="d6">
|
||||
<y:ShapeNode>
|
||||
<y:Geometry height="30.0" width="101.0" x="115.5" y="644.576171875"/>
|
||||
<y:Fill color="#FFCC00" transparent="false"/>
|
||||
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
|
||||
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="61.2578125" x="19.87109375" y="6.015625">Database<y:LabelModel>
|
||||
<y:SmartNodeLabelModel distance="4.0"/>
|
||||
</y:LabelModel>
|
||||
<y:ModelParameter>
|
||||
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
|
||||
</y:ModelParameter>
|
||||
</y:NodeLabel>
|
||||
<y:Shape type="rectangle"/>
|
||||
</y:ShapeNode>
|
||||
</data>
|
||||
</node>
|
||||
<edge id="n0::n1::e0" source="n0::n1::n0" target="n0::n1::n1">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::n1::e1" source="n0::n1::n1" target="n0::n1::n2">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::n1::e2" source="n0::n1::n2" target="n0::n1::n3">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::n1::e3" source="n0::n1::n3" target="n0::n1::n0">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::e0" source="n0::n1::n2" target="n0::n2">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::e1" source="n0::n2" target="n0::n3::n0">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::e2" source="n0::n2" target="n0::n3::n1">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::e3" source="n0::n2" target="n0::n3::n2">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::e4" source="n0::n2" target="n0::n3::n3">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::e5" source="n0::n1::n0" target="n0::n0">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="standard" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::n5::e0" source="n0::n5::n3" target="n0::n5::n2">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="white_delta"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::n5::e1" source="n0::n5::n4" target="n0::n5::n2">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="white_delta"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::e6" source="n0::n2" target="n0::n5::n2">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::e7" source="n0::n5::n2" target="n0::n5">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="-66.45188359803838" ty="-10.429848402884339"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::e8" source="n0::n5::n2" target="n0::n6">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::e9" source="n0::n5::n2" target="n0::n7">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="white_delta"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::n5::e2" source="n0::n5::n1" target="n0::n5::n2">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::n5::e3" source="n0::n5::n0" target="n0::n5::n2">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::e10" source="n0::n2" target="n0::n4::n0">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::n4::e0" source="n0::n4::n0" target="n0::n4::n1">
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="n0::n5::e4" source="n0::n5::n0" target="n0::n5::n2">
|
||||
<data key="d9"/>
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="none" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="e0" source="n0::n4::n1" target="n1::n0">
|
||||
<data key="d9"/>
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="standard" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="e1" source="n0::n4::n1" target="n1::n1">
|
||||
<data key="d9"/>
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="standard" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
<edge id="e2" source="n0::n6" target="n2">
|
||||
<data key="d9"/>
|
||||
<data key="d10">
|
||||
<y:PolyLineEdge>
|
||||
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
|
||||
<y:LineStyle color="#000000" type="line" width="1.0"/>
|
||||
<y:Arrows source="standard" target="standard"/>
|
||||
<y:BendStyle smoothed="false"/>
|
||||
</y:PolyLineEdge>
|
||||
</data>
|
||||
</edge>
|
||||
</graph>
|
||||
<data key="d7">
|
||||
<y:Resources/>
|
||||
</data>
|
||||
</graphml>
|
||||
@@ -1,58 +0,0 @@
|
||||
Welcome to Glare's documentation!
|
||||
=================================
|
||||
|
||||
Glare is the OpenStack artifact service. This project aims to provide
|
||||
a mechanism to define tasks and workflows without writing code, manage
|
||||
and execute them in the cloud environment.
|
||||
|
||||
Overview
|
||||
--------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
overview
|
||||
quickstart
|
||||
architecture
|
||||
Roadmap <https://wiki.openstack.org/wiki/Glare/Roadmap>
|
||||
main_features
|
||||
|
||||
User guide
|
||||
----------
|
||||
|
||||
**Installation**
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
guides/installation_guide
|
||||
guides/configuration_guide
|
||||
guides/dashboard_guide
|
||||
guides/upgrade_guide
|
||||
guides/glareclient_guide
|
||||
guides/hooks_guide
|
||||
|
||||
**API**
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
developer/webapi/index
|
||||
|
||||
|
||||
Developer guide
|
||||
---------------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
developer/index
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
Glare Features
|
||||
==============
|
||||
|
||||
TBD
|
||||
@@ -1,17 +0,0 @@
|
||||
Glare Overview
|
||||
==============
|
||||
|
||||
What is Glare?
|
||||
--------------
|
||||
|
||||
TBD
|
||||
|
||||
Main use cases
|
||||
--------------
|
||||
|
||||
TBD
|
||||
|
||||
Rationale
|
||||
---------
|
||||
|
||||
TBD
|
||||
@@ -1,4 +0,0 @@
|
||||
Quick Start
|
||||
===========
|
||||
|
||||
TBD
|
||||
@@ -1,53 +0,0 @@
|
||||
# Use this pipeline for trusted auth - DEFAULT
|
||||
# Auth token has format user:tenant:roles
|
||||
[pipeline:glare-api]
|
||||
pipeline = cors faultwrapper healthcheck http_proxy_to_wsgi versionnegotiation osprofiler trustedauth glarev1api
|
||||
|
||||
# Use this pipeline for keystone auth
|
||||
[pipeline:glare-api-keystone]
|
||||
pipeline = cors faultwrapper healthcheck http_proxy_to_wsgi versionnegotiation osprofiler authtoken context glarev1api
|
||||
|
||||
# Use this pipeline for Keycloak auth
|
||||
[pipeline:glare-api-keycloak]
|
||||
pipeline = cors faultwrapper healthcheck http_proxy_to_wsgi versionnegotiation osprofiler keycloak context glarev1api
|
||||
|
||||
# Use this pipeline when you want to specify context params manually
|
||||
[pipeline:glare-api-noauth]
|
||||
pipeline = cors faultwrapper healthcheck http_proxy_to_wsgi versionnegotiation osprofiler context glarev1api
|
||||
|
||||
[app:glarev1api]
|
||||
paste.app_factory = glare.api.v1.router:API.factory
|
||||
|
||||
[filter:healthcheck]
|
||||
paste.filter_factory = oslo_middleware:Healthcheck.factory
|
||||
backends = disable_by_file
|
||||
disable_by_file_path = /etc/glare/healthcheck_disable
|
||||
|
||||
[filter:versionnegotiation]
|
||||
paste.filter_factory = glare.api.middleware.version_negotiation:GlareVersionNegotiationFilter.factory
|
||||
|
||||
[filter:faultwrapper]
|
||||
paste.filter_factory = glare.api.middleware.fault:GlareFaultWrapperFilter.factory
|
||||
|
||||
[filter:context]
|
||||
paste.filter_factory = glare.api.middleware.context:ContextMiddleware.factory
|
||||
|
||||
[filter:trustedauth]
|
||||
paste.filter_factory = glare.api.middleware.context:TrustedAuthMiddleware.factory
|
||||
|
||||
[filter:authtoken]
|
||||
paste.filter_factory = keystonemiddleware.auth_token:filter_factory
|
||||
delay_auth_decision = true
|
||||
|
||||
[filter:keycloak]
|
||||
paste.filter_factory = glare.api.middleware.keycloak_auth:KeycloakAuthMiddleware.factory
|
||||
|
||||
[filter:osprofiler]
|
||||
paste.filter_factory = osprofiler.web:WsgiMiddleware.factory
|
||||
|
||||
[filter:cors]
|
||||
use = egg:oslo.middleware#cors
|
||||
oslo_config_project = glare
|
||||
|
||||
[filter:http_proxy_to_wsgi]
|
||||
paste.filter_factory = oslo_middleware:HTTPProxyToWSGI.factory
|
||||
@@ -1,25 +0,0 @@
|
||||
# glare-swift.conf.sample
|
||||
#
|
||||
# This file is an example config file when
|
||||
# multiple swift accounts/backing stores are enabled.
|
||||
#
|
||||
# Specify the reference name in []
|
||||
# For each section, specify the auth_address, user and key.
|
||||
#
|
||||
# WARNING:
|
||||
# * If any of auth_address, user or key is not specified,
|
||||
# the glare's swift store will fail to configure
|
||||
|
||||
[ref1]
|
||||
user = tenant:user1
|
||||
key = key1
|
||||
auth_version = 2
|
||||
auth_address = http://localhost:5000/v2.0
|
||||
|
||||
[ref2]
|
||||
user = project_name:user_name2
|
||||
key = key2
|
||||
user_domain_id = default
|
||||
project_domain_id = default
|
||||
auth_version = 3
|
||||
auth_address = http://localhost:5000/v3
|
||||
@@ -1,13 +0,0 @@
|
||||
[DEFAULT]
|
||||
output_file = etc/glare.conf.sample
|
||||
namespace = glare
|
||||
namespace = glance.store
|
||||
namespace = keystonemiddleware.auth_token
|
||||
namespace = oslo.concurrency
|
||||
namespace = oslo.db
|
||||
namespace = oslo.db.concurrency
|
||||
namespace = oslo.log
|
||||
namespace = oslo.messaging
|
||||
namespace = oslo.middleware.cors
|
||||
namespace = oslo.middleware.http_proxy_to_wsgi
|
||||
namespace = oslo.policy
|
||||
@@ -1,158 +0,0 @@
|
||||
# Copyright 2011-2016 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_context import context
|
||||
from oslo_log import log as logging
|
||||
from oslo_middleware import base as base_middleware
|
||||
from oslo_middleware import request_id
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from glare.common import exception
|
||||
from glare.common import policy
|
||||
from glare.i18n import _
|
||||
|
||||
context_opts = [
|
||||
cfg.BoolOpt('allow_anonymous_access', default=False,
|
||||
help=_('Allow unauthenticated users to access the API with '
|
||||
'read-only privileges. This only applies when using '
|
||||
'ContextMiddleware.'))
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.register_opts(context_opts)
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RequestContext(context.RequestContext):
|
||||
"""Stores information about the security context for Glare.
|
||||
|
||||
Stores how the user accesses the system, as well as additional request
|
||||
information.
|
||||
"""
|
||||
|
||||
def __init__(self, service_catalog=None, **kwargs):
|
||||
super(RequestContext, self).__init__(**kwargs)
|
||||
self.service_catalog = service_catalog
|
||||
# check if user is admin using policy file
|
||||
if kwargs.get('is_admin') is None:
|
||||
self.is_admin = policy.check_is_admin(self)
|
||||
|
||||
def to_dict(self):
|
||||
d = super(RequestContext, self).to_dict()
|
||||
d.update({
|
||||
'service_catalog': self.service_catalog,
|
||||
})
|
||||
return d
|
||||
|
||||
def to_policy_values(self):
|
||||
values = super(RequestContext, self).to_policy_values()
|
||||
values['is_admin'] = self.is_admin
|
||||
values['read_only'] = self.read_only
|
||||
return values
|
||||
|
||||
|
||||
class BaseContextMiddleware(base_middleware.ConfigurableMiddleware):
|
||||
@staticmethod
|
||||
def process_response(resp, request=None):
|
||||
try:
|
||||
request_id = resp.request.context.request_id
|
||||
# For python 3 compatibility need to use bytes type
|
||||
prefix = b'req-' if isinstance(request_id, bytes) else 'req-'
|
||||
|
||||
if not request_id.startswith(prefix):
|
||||
request_id = prefix + request_id
|
||||
|
||||
resp.headers['x-openstack-request-id'] = request_id
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
return resp
|
||||
|
||||
|
||||
class ContextMiddleware(BaseContextMiddleware):
|
||||
@staticmethod
|
||||
def process_request(req):
|
||||
"""Convert authentication information into a request context.
|
||||
|
||||
Generate a RequestContext object from the available
|
||||
authentication headers and store on the 'context' attribute
|
||||
of the req object.
|
||||
|
||||
:param req: wsgi request object that will be given the context object
|
||||
:raises: webob.exc.HTTPUnauthorized: when value of the
|
||||
X-Identity-Status header is not
|
||||
'Confirmed' and anonymous access
|
||||
is disallowed
|
||||
"""
|
||||
if req.headers.get('X-Identity-Status') == 'Confirmed':
|
||||
req.context = ContextMiddleware._get_authenticated_context(req)
|
||||
elif CONF.allow_anonymous_access:
|
||||
req.context = RequestContext(read_only=True, is_admin=False)
|
||||
else:
|
||||
raise exception.Unauthorized()
|
||||
|
||||
@staticmethod
|
||||
def _get_authenticated_context(req):
|
||||
headers = req.headers
|
||||
service_catalog = None
|
||||
if headers.get('X-Service-Catalog') is not None:
|
||||
catalog_header = headers.get('X-Service-Catalog')
|
||||
try:
|
||||
service_catalog = jsonutils.loads(catalog_header)
|
||||
except ValueError:
|
||||
raise exception.GlareException(
|
||||
_('Invalid service catalog json.'))
|
||||
kwargs = {
|
||||
'service_catalog': service_catalog,
|
||||
'request_id': req.environ.get(request_id.ENV_REQUEST_ID),
|
||||
}
|
||||
return RequestContext.from_environ(req.environ, **kwargs)
|
||||
|
||||
|
||||
class TrustedAuthMiddleware(BaseContextMiddleware):
|
||||
@staticmethod
|
||||
def process_request(req):
|
||||
auth_token = req.headers.get('X-Auth-Token')
|
||||
if not auth_token:
|
||||
msg = _("Auth token must be provided")
|
||||
raise exception.Unauthorized(msg)
|
||||
try:
|
||||
user, tenant, roles = auth_token.strip().split(':', 3)
|
||||
except ValueError:
|
||||
msg = _("Wrong auth token format. It must be 'user:tenant:roles'")
|
||||
raise exception.Unauthorized(msg)
|
||||
if not tenant:
|
||||
msg = _("Tenant must be specified in auth token. "
|
||||
"Format of the token is 'user:tenant:roles'")
|
||||
raise exception.Unauthorized(msg)
|
||||
elif tenant.lower() == 'none':
|
||||
tenant = None
|
||||
req.headers['X-Identity-Status'] = 'Nope'
|
||||
else:
|
||||
req.headers['X-Identity-Status'] = 'Confirmed'
|
||||
|
||||
req.headers['X-User-Id'] = user
|
||||
req.headers['X-Tenant-Id'] = tenant
|
||||
req.headers['X-Roles'] = roles
|
||||
|
||||
if req.headers.get('X-Identity-Status') == 'Confirmed':
|
||||
kwargs = {'request_id': req.environ.get(request_id.ENV_REQUEST_ID)}
|
||||
req.context = RequestContext.from_environ(req.environ, **kwargs)
|
||||
elif CONF.allow_anonymous_access:
|
||||
req.context = RequestContext(read_only=True, is_admin=False)
|
||||
else:
|
||||
raise exception.Unauthorized()
|
||||
@@ -1,128 +0,0 @@
|
||||
# Copyright 2016 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""A middleware that turns exceptions into parsable string.
|
||||
Inspired by Cinder's and Heat't faultwrapper.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_middleware import base as base_middleware
|
||||
from oslo_utils import reflection
|
||||
import six
|
||||
import webob.dec
|
||||
import webob.exc
|
||||
|
||||
from glare.common import exception
|
||||
from glare.common import wsgi
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Fault(object):
    """WSGI application rendering a prepared error dict as a JSON response.

    Wraps the error dict produced by ``GlareFaultWrapperFilter._error`` and
    serializes it when invoked as a WSGI app.
    """

    def __init__(self, error):
        # error: dict carrying 'code', 'title', 'explanation' and a nested
        # 'error' payload (message/type, optionally traceback).
        self.error = error

    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def __call__(self, req):
        """Serialize the stored error into an HTTP response for *req*."""
        fallback = webob.exc.HTTPInternalServerError()
        response = webob.Response(request=req)
        # Use the mapped HTTP code if present, otherwise 500.
        response.status_code = self.error.get('code', fallback.code)
        wsgi.JSONResponseSerializer().default(response, self.error)
        return response
|
||||
|
||||
|
||||
class GlareFaultWrapperFilter(base_middleware.ConfigurableMiddleware):
    """Replace error body with something the client can parse."""
    # Maps exception class *names* to webob HTTP error classes. Lookup walks
    # up the exception's base classes (see _map_exception_to_error), so the
    # 'Exception' entry acts as the guaranteed catch-all terminator.
    error_map = {
        'BadRequest': webob.exc.HTTPBadRequest,
        'Unauthorized': webob.exc.HTTPUnauthorized,
        'Forbidden': webob.exc.HTTPForbidden,
        'NotFound': webob.exc.HTTPNotFound,
        'RequestTimeout': webob.exc.HTTPRequestTimeout,
        'Conflict': webob.exc.HTTPConflict,
        'Gone': webob.exc.HTTPGone,
        'PreconditionFailed': webob.exc.HTTPPreconditionFailed,
        'RequestEntityTooLarge': webob.exc.HTTPRequestEntityTooLarge,
        'UnsupportedMediaType': webob.exc.HTTPUnsupportedMediaType,
        'RequestRangeNotSatisfiable': webob.exc.HTTPRequestRangeNotSatisfiable,
        'Locked': webob.exc.HTTPLocked,
        'FailedDependency': webob.exc.HTTPFailedDependency,
        'NotAcceptable': webob.exc.HTTPNotAcceptable,
        'Exception': webob.exc.HTTPInternalServerError,
    }

    def _map_exception_to_error(self, class_exception):
        """Return the webob error class mapped to *class_exception*.

        Recurses through __base__ until a mapped name is found; terminates
        because 'Exception' is in error_map.
        """
        if class_exception.__name__ not in self.error_map:
            return self._map_exception_to_error(class_exception.__base__)

        return self.error_map[class_exception.__name__]

    def _error(self, ex):
        """Build a JSON-serializable error dict describing exception *ex*.

        :param ex: the caught exception instance
        :return: dict with the mapped HTTP 'code'/'title'/'explanation' and
            a nested 'error' dict (message, type, and - in debug mode - the
            traceback text)
        """
        traceback_marker = 'Traceback (most recent call last)'
        webob_exc = None

        ex_type = reflection.get_class_name(ex, fully_qualified=False)

        full_message = six.text_type(ex)
        if traceback_marker in full_message:
            # The exception text already embeds a traceback - split the
            # human-readable message from the trace portion.
            message, msg_trace = full_message.split(traceback_marker, 1)
            message = message.rstrip('\n')
            msg_trace = traceback_marker + msg_trace
        else:
            msg_trace = 'None\n'
            if sys.exc_info() != (None, None, None):
                # We are inside an active exception context - format it.
                msg_trace = traceback.format_exc()
            message = full_message

        if isinstance(ex, exception.GlareException):
            message = six.text_type(ex)

        if not webob_exc:
            webob_exc = self._map_exception_to_error(ex.__class__)

        error = {
            'code': webob_exc.code,
            'title': webob_exc.title,
            'explanation': webob_exc.explanation,
            'error': {
                'message': message,
                'type': ex_type,
            }
        }

        # Expose the traceback only when debug logging is enabled.
        if cfg.CONF.debug:
            error['error']['traceback'] = msg_trace

        # Add supported min/max microversions when the requested API
        # version was not acceptable.
        if isinstance(ex, exception.InvalidGlobalAPIVersion):
            error['min_version'] = ex.kwargs['min_ver']
            error['max_version'] = ex.kwargs['max_ver']

        return error

    @webob.dec.wsgify
    def __call__(self, req):
        """Run the wrapped app; convert any exception into a Fault response."""
        try:
            return req.get_response(self.application)
        except Exception as exc:
            LOG.exception(exc)
            return req.get_response(Fault(self._error(exc)))
|
||||
@@ -1,134 +0,0 @@
|
||||
# Copyright 2017 - Nokia Networks
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import jwt
|
||||
import memcache
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_middleware import base as base_middleware
|
||||
import pprint
|
||||
import requests
|
||||
import webob.dec
|
||||
|
||||
from glare.common import exception
|
||||
from glare.i18n import _
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
# Configuration options for the Keycloak OpenID-Connect auth middleware.
keycloak_oidc_opts = [
    cfg.StrOpt(
        'auth_url',
        default='http://127.0.0.1:8080/auth',
        help='Keycloak base url (e.g. https://my.keycloak:8443/auth)'
    ),
    cfg.StrOpt(
        'user_info_endpoint_url',
        default='/realms/%s/protocol/openid-connect/userinfo',
        help='Endpoint against which authorization will be performed'
    ),
    # Declared as BoolOpt (was StrOpt with a boolean default): with StrOpt
    # any non-empty config value - including the string "False" - is truthy,
    # which silently disabled TLS certificate verification in
    # `verify=not CONF.keycloak_oidc.insecure`. BoolOpt parses config
    # strings into real booleans.
    cfg.BoolOpt(
        'insecure',
        default=False,
        help='If True, SSL/TLS certificate verification is disabled'
    ),
    cfg.StrOpt(
        'memcached_server',
        default=None,
        help='Url of memcached server to use for caching'
    ),
    cfg.IntOpt(
        'token_cache_time',
        default=60,
        min=0,
        help='In order to prevent excessive effort spent validating '
             'tokens, the middleware caches previously-seen tokens '
             'for a configurable duration (in seconds).'
    ),
]

CONF = cfg.CONF
CONF.register_opts(keycloak_oidc_opts, group="keycloak_oidc")
|
||||
|
||||
|
||||
class KeycloakAuthMiddleware(base_middleware.Middleware):
    """Authenticate requests against a Keycloak OpenID-Connect server.

    Expects a JWT access token in the 'X-Auth-Token' header, validates it
    against Keycloak's userinfo endpoint, then stamps identity headers
    ('X-Identity-Status', 'X-Project-Id', 'X-Roles') on the request.
    """

    def __init__(self, app):
        super(KeycloakAuthMiddleware, self).__init__(application=app)
        # Optional memcached-backed cache of already-validated tokens.
        mcserv_url = CONF.keycloak_oidc.memcached_server
        self.mcclient = memcache.Client(mcserv_url) if mcserv_url else None

    def authenticate(self, access_token, realm_name):
        """Validate *access_token* against the realm's userinfo endpoint.

        :param access_token: raw bearer token from the request
        :param realm_name: Keycloak realm extracted from the token issuer
        :return: userinfo data from Keycloak (possibly served from cache)
        :raises exception.Unauthorized: on HTTP 401 from Keycloak
        :raises exception.Forbidden: on HTTP 403 from Keycloak
        :raises exception.GlareException: on connection failure or other
            HTTP errors (>= 400)
        """
        info = None
        if self.mcclient:
            info = self.mcclient.get(access_token)

        # Cache miss (or no cache): ask Keycloak directly.
        if info is None and CONF.keycloak_oidc.user_info_endpoint_url:
            try:
                resp = requests.get(
                    CONF.keycloak_oidc.auth_url +
                    (CONF.keycloak_oidc.user_info_endpoint_url % realm_name),
                    headers={"Authorization": "Bearer %s" % access_token},
                    verify=not CONF.keycloak_oidc.insecure
                )
            except requests.ConnectionError:
                msg = _("Can't connect to keycloak server with address '%s'."
                        ) % CONF.keycloak_oidc.auth_url
                LOG.error(msg)
                raise exception.GlareException(message=msg)

            if resp.status_code == 401:
                raise exception.Unauthorized(message=resp.text)
            if resp.status_code == 403:
                raise exception.Forbidden(message=resp.text)
            elif resp.status_code >= 400:
                raise exception.GlareException(message=resp.text)

            # Cache the successful validation for token_cache_time seconds.
            if self.mcclient:
                self.mcclient.set(access_token, resp.json(),
                                  time=CONF.keycloak_oidc.token_cache_time)
            info = resp.json()

        LOG.debug("HTTP response from OIDC provider: %s",
                  pprint.pformat(info))

        return info

    @webob.dec.wsgify
    def __call__(self, request):
        """WSGI entry point: authenticate, set identity headers, pass on."""
        if 'X-Auth-Token' not in request.headers:
            msg = _("Auth token must be provided in 'X-Auth-Token' header.")
            LOG.error(msg)
            raise exception.Unauthorized()
        access_token = request.headers.get('X-Auth-Token')
        try:
            # NOTE(review): signature verification is disabled here - the
            # token is decoded only to read its claims (issuer, roles);
            # actual validation appears to be delegated to authenticate()
            # via the userinfo endpoint. Confirm this is intentional.
            decoded = jwt.decode(access_token, algorithms=['RS256'],
                                 verify=False)
        except Exception:
            msg = _("Token can't be decoded because of wrong format.")
            LOG.error(msg)
            raise exception.Unauthorized()

        # Get user realm from parsed token
        # Format is "iss": "http://<host>:<port>/auth/realms/<realm_name>",
        __, __, realm_name = decoded['iss'].strip().rpartition('/realms/')

        # Get roles from the parsed token (empty string if none present).
        roles = ','.join(decoded['realm_access']['roles']) \
            if 'realm_access' in decoded else ''

        self.authenticate(access_token, realm_name)

        request.headers["X-Identity-Status"] = "Confirmed"
        request.headers["X-Project-Id"] = realm_name
        request.headers["X-Roles"] = roles
        return request.get_response(self.application)
|
||||
@@ -1,125 +0,0 @@
|
||||
# Copyright 2011 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
A filter middleware that inspects the requested URI for a version string
|
||||
and/or Accept headers and attempts to negotiate an API controller to
|
||||
return.
|
||||
"""
|
||||
|
||||
import microversion_parse
|
||||
from oslo_log import log as logging
|
||||
from oslo_middleware import base as base_middleware
|
||||
|
||||
|
||||
from glare.api.v1 import api_version_request as api_version
|
||||
from glare.api import versions as artifacts_versions
|
||||
from glare.common import exception
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GlareVersionNegotiationFilter(base_middleware.ConfigurableMiddleware):
    """Middleware that defines API version in request and redirects it
    to correct Router.
    """

    # Service type used for microversion header negotiation.
    SERVICE_TYPE = 'artifact'
    # Prefix for media-type based versioning in the Accept header.
    MIME_TYPE = 'application/vnd.openstack.artifacts-'

    @staticmethod
    def get_version_from_accept(accept_header):
        """Try to parse accept header to extract api version.

        :param accept_header: accept header
        :return: version string in the request or None if not specified
        """
        accept = str(accept_header)
        if accept.startswith(GlareVersionNegotiationFilter.MIME_TYPE):
            LOG.debug("Using media-type versioning")
            # Everything after the MIME prefix is the version string.
            return accept[len(GlareVersionNegotiationFilter.MIME_TYPE):]

        return None

    @staticmethod
    def process_request(req):
        """Process api request:
        1. Define if this is request for available versions or not
        2. If it is not version request check extract version
        3. Validate available version and add version info to request
        """

        args = {'method': req.method, 'path': req.path, 'accept': req.accept}
        LOG.debug("Determining version of request: %(method)s %(path)s "
                  "Accept: %(accept)s", args)

        # determine if this is request for versions; returning a response
        # here short-circuits the middleware chain.
        if req.path_info in ('/versions', '/'):
            is_multi = req.path_info == '/'
            return artifacts_versions.Controller.index(
                req, is_multi=is_multi)

        # determine api version from request (Accept header first)
        req_version = GlareVersionNegotiationFilter.get_version_from_accept(
            req.accept)
        if req_version is None:
            # determine api version from microversion header
            LOG.debug("Determine version from microversion header.")
            req_version = microversion_parse.get_version(
                req.headers,
                service_type=GlareVersionNegotiationFilter.SERVICE_TYPE)

        # validate microversions header and attach the parsed version to
        # the request for downstream dispatching
        req.api_version_request = \
            GlareVersionNegotiationFilter._get_api_version_request(
                req_version)
        req_version = req.api_version_request.get_string()

        LOG.debug("Matched version: %s", req_version)
        LOG.debug('new path %s', req.path_info)

    @staticmethod
    def _get_api_version_request(req_version):
        """Set API version for request based on the version header string.

        :param req_version: version string, 'latest', or None
        :return: validated APIVersionRequest object
        :raises exception.InvalidGlobalAPIVersion: if outside supported range
        """
        if req_version is None:
            LOG.debug("No API version in request header. Use default version.")
            cur_ver = api_version.APIVersionRequest.default_version()
        elif req_version == 'latest':
            # 'latest' is a special keyword which is equivalent to
            # requesting the maximum version of the API supported
            cur_ver = api_version.APIVersionRequest.max_version()
        else:
            cur_ver = api_version.APIVersionRequest(req_version)

        # Check that the version requested is within the global
        # minimum/maximum of supported API versions
        if not cur_ver.matches(cur_ver.min_version(), cur_ver.max_version()):
            raise exception.InvalidGlobalAPIVersion(
                req_ver=cur_ver.get_string(),
                min_ver=cur_ver.min_version().get_string(),
                max_ver=cur_ver.max_version().get_string())
        return cur_ver

    @staticmethod
    def process_response(response, request=None):
        """Attach the negotiated microversion header to the response.

        Adds the standard microversion header (plus a matching 'Vary'
        entry) when the request carried an api_version_request.
        """
        if hasattr(response, 'headers'):
            if hasattr(request, 'api_version_request'):
                api_header_name = microversion_parse.STANDARD_HEADER
                response.headers[api_header_name] = (
                    GlareVersionNegotiationFilter.SERVICE_TYPE + ' ' +
                    request.api_version_request.get_string())
                response.headers.add('Vary', api_header_name)

        return response
|
||||
@@ -1,123 +0,0 @@
|
||||
# Copyright 2016 Openstack Foundation.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import re
|
||||
|
||||
from glare.common import exception
|
||||
from glare.i18n import _
|
||||
|
||||
|
||||
REST_API_VERSION_HISTORY = """REST API Version History:
|
||||
|
||||
* 1.0 - First stable API version that supports microversion. If API version
|
||||
is not specified in the request then API v1.0 is used as default API
|
||||
version.
|
||||
"""
|
||||
|
||||
|
||||
class APIVersionRequest(object):
    """This class represents an API Version Request with convenience
    methods for manipulation and comparison of version
    numbers that we need to do to implement microversions.
    """

    _MIN_API_VERSION = "1.0"
    _MAX_API_VERSION = "1.0"
    _DEFAULT_API_VERSION = "1.0"

    def __init__(self, version_string):
        """Create an API version request object.

        :param version_string: String representation of APIVersionRequest.
            Correct format is 'X.Y', where 'X' and 'Y' are int values.
        :raises exception.BadRequest: if version_string is malformed
        """
        # Major must be >= 1; minor may be 0; no leading zeros allowed.
        match = re.match(r"^([1-9]\d*)\.([1-9]\d*|0)$", version_string)
        if match:
            self.ver_major = int(match.group(1))
            self.ver_minor = int(match.group(2))
        else:
            msg = _("API version string %s is not valid. "
                    "Cannot determine API version.") % version_string
            raise exception.BadRequest(msg)

    def __str__(self):
        """Debug/Logging representation of object."""
        return ("API Version Request Major: %s, Minor: %s"
                % (self.ver_major, self.ver_minor))

    def _format_type_error(self, other):
        # Comparisons are deliberately only defined between
        # APIVersionRequest instances; anything else raises TypeError.
        return TypeError(_("'%(other)s' should be an instance of '%(cls)s'") %
                         {"other": other, "cls": self.__class__})

    def __lt__(self, other):
        if not isinstance(other, APIVersionRequest):
            raise self._format_type_error(other)

        return ((self.ver_major, self.ver_minor) <
                (other.ver_major, other.ver_minor))

    def __eq__(self, other):
        if not isinstance(other, APIVersionRequest):
            raise self._format_type_error(other)

        return ((self.ver_major, self.ver_minor) ==
                (other.ver_major, other.ver_minor))

    def __gt__(self, other):
        if not isinstance(other, APIVersionRequest):
            raise self._format_type_error(other)

        return ((self.ver_major, self.ver_minor) >
                (other.ver_major, other.ver_minor))

    def __le__(self, other):
        return self < other or self == other

    def __ne__(self, other):
        return not self.__eq__(other)

    def __ge__(self, other):
        return self > other or self == other

    def __hash__(self):
        # Defining __eq__ sets __hash__ to None on Python 3, which made
        # instances unusable as dict keys or set members. Hash on the same
        # tuple that equality compares so equal versions hash equally.
        return hash((self.ver_major, self.ver_minor))

    def matches(self, min_version, max_version):
        """Returns whether the version object represents a version
        greater than or equal to the minimum version and less than
        or equal to the maximum version.

        :param min_version: Minimum acceptable version.
        :param max_version: Maximum acceptable version.
        :returns: boolean
        """
        return min_version <= self <= max_version

    def get_string(self):
        """Converts object to string representation which is used to create
        an APIVersionRequest object results in the same version request.
        """
        return "%s.%s" % (self.ver_major, self.ver_minor)

    @classmethod
    def min_version(cls):
        """Minimal allowed api version."""
        return APIVersionRequest(cls._MIN_API_VERSION)

    @classmethod
    def max_version(cls):
        """Maximal allowed api version."""
        return APIVersionRequest(cls._MAX_API_VERSION)

    @classmethod
    def default_version(cls):
        """Default api version if no version in request."""
        return APIVersionRequest(cls._DEFAULT_API_VERSION)
|
||||
@@ -1,172 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import functools
|
||||
|
||||
from glare.api.v1 import api_version_request as api_version
|
||||
from glare.common import exception as exc
|
||||
from glare.i18n import _
|
||||
|
||||
|
||||
class VersionedMethod(object):
    """Record describing one versioned implementation of an API method."""

    def __init__(self, name, start_version, end_version, func):
        """Store versioning metadata for a single method.

        :param name: Name of the method
        :param start_version: Minimum acceptable version
        :param end_version: Maximum acceptable version
        :param func: Method to call
        """
        self.name = name
        self.func = func
        # NOTE(kairat): both version bounds are inclusive.
        self.start_version = start_version
        self.end_version = end_version

    def __str__(self):
        """Human-readable summary for debugging/logging."""
        summary = ("Version Method %s: min: %s, max: %s"
                   % (self.name, self.start_version, self.end_version))
        return summary
|
||||
|
||||
|
||||
class VersionedResource(object):
    """Versioned mixin that provides ability to define versioned methods and
    return appropriate methods based on user request.
    """

    # prefix for all versioned methods in class; the registry attribute is
    # '<prefix><ClassName>' so each subclass gets its own method table.
    VER_METHODS_ATTR_PREFIX = 'versioned_methods_'

    @staticmethod
    def check_for_versions_intersection(func_list):
        """Determines whether function list contains version intervals
        intersections or not. General algorithm:
        https://en.wikipedia.org/wiki/Intersection_algorithm

        :param func_list: list of VersionedMethod objects
        :return: boolean
        """
        # Sweep-line over interval endpoints: +1 at a start, -1 at an end.
        # If more than one interval is open at once, they overlap.
        pairs = []
        counter = 0
        for f in func_list:
            pairs.append((f.start_version, 1, f))
            pairs.append((f.end_version, -1, f))

        def compare(x):
            return x[0]

        pairs.sort(key=compare)
        for p in pairs:
            counter += p[1]
            if counter > 1:
                return True
        return False

    @classmethod
    def supported_versions(cls, min_ver, max_ver=None):
        """Decorator for versioning api methods.

        Add the decorator to any method which takes a request object
        as the first parameter and belongs to a class which inherits from
        wsgi.Controller. The implementation inspired by Nova.

        :param min_ver: string representing minimum version
        :param max_ver: optional string representing maximum version
            (defaults to the globally supported maximum)
        """

        def decorator(f):
            obj_min_ver = api_version.APIVersionRequest(min_ver)
            if max_ver:
                obj_max_ver = api_version.APIVersionRequest(max_ver)
            else:
                obj_max_ver = api_version.APIVersionRequest.max_version()

            # Add to list of versioned methods registered
            func_name = f.__name__
            new_func = VersionedMethod(func_name, obj_min_ver, obj_max_ver, f)

            # Registry is stored on the class under a per-class attribute
            # name so subclasses don't share (or shadow) each other's table.
            versioned_attr = cls.VER_METHODS_ATTR_PREFIX + cls.__name__
            func_dict = getattr(cls, versioned_attr, {})
            if not func_dict:
                setattr(cls, versioned_attr, func_dict)

            func_list = func_dict.get(func_name, [])
            if not func_list:
                func_dict[func_name] = func_list
            func_list.append(new_func)

            # Reject registration outright if two implementations of the
            # same method claim overlapping version ranges.
            is_intersect = cls.check_for_versions_intersection(
                func_list)

            if is_intersect:
                raise exc.ApiVersionsIntersect(
                    name=new_func.name,
                    min_ver=new_func.start_version,
                    max_ver=new_func.end_version,
                )

            # Ensure the list is sorted by minimum version (reversed)
            # so later when we work through the list in order we find
            # the method which has the latest version which supports
            # the version requested.
            func_list.sort(key=lambda vf: vf.start_version, reverse=True)

            return f

        return decorator

    def __getattribute__(self, key):
        """Intercept attribute access to dispatch versioned methods.

        If *key* names a method registered via supported_versions on this
        class, return a selector closure that picks the implementation
        matching the request's API version; otherwise fall back to normal
        attribute lookup. object.__getattribute__ is used internally to
        avoid re-entering this hook.
        """
        def version_select(*args, **kwargs):
            """Look for the method which matches the name supplied and version
            constraints and calls it with the supplied arguments.

            :returns: Returns the result of the method called
            :raises: VersionNotFoundForAPIMethod if there is no method which
                 matches the name and version constraints
            """
            # versioning is used in 3 classes: request deserializer and
            # controller have request as first argument
            # response serializer has response as first argument
            # we must respect all three cases
            if hasattr(args[0], 'api_version_request'):
                ver = args[0].api_version_request
            elif hasattr(args[0], 'request'):
                ver = args[0].request.api_version_request
            else:
                raise exc.VersionNotFoundForAPIMethod(
                    message=_("Api version not found in the request."))

            # NOTE(review): this reads 'versioned_methods' WITHOUT the
            # per-class prefix used at registration time above; confirm a
            # plain 'versioned_methods' attribute is provided elsewhere
            # (e.g. by a subclass or base), otherwise this lookup fails.
            func_list = self.versioned_methods[key]
            for func in func_list:
                if ver.matches(func.start_version, func.end_version):
                    # Update the version_select wrapper function so
                    # other decorator attributes like wsgi.response
                    # are still respected.
                    functools.update_wrapper(version_select, func.func)
                    return func.func(self, *args, **kwargs)

            # No version match
            raise exc.VersionNotFoundForAPIMethod(version=ver)

        class_obj = object.__getattribute__(self, '__class__')
        prefix = object.__getattribute__(self, 'VER_METHODS_ATTR_PREFIX')
        attr_name = prefix + object.__getattribute__(class_obj, '__name__')
        try:
            if key in object.__getattribute__(self, attr_name):
                return version_select
        except AttributeError:
            # No versioning on this class
            pass

        return object.__getattribute__(self, key)
|
||||
@@ -1,464 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""WSGI Resource definition for Glare. Defines Glare API and serialization/
|
||||
deserialization of incoming requests."""
|
||||
|
||||
import json
|
||||
import jsonpatch
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import encodeutils
|
||||
import six
|
||||
from six.moves import http_client
|
||||
import six.moves.urllib.parse as urlparse
|
||||
|
||||
from glare.api.v1 import api_versioning
|
||||
from glare.common import exception as exc
|
||||
from glare.common import wsgi
|
||||
from glare import engine
|
||||
from glare.i18n import _
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
CONF = cfg.CONF

# Pagination limits for artifact list requests; registered on the global
# CONF below and consumed by RequestDeserializer.list().
list_configs = [
    cfg.IntOpt('default_api_limit', default=25,
               help=_('Default value for the number of items returned by a '
                      'request if not specified explicitly in the request')),
    cfg.IntOpt('max_api_limit', default=1000,
               help=_('Maximum permissible number of items that could be '
                      'returned by a request')),
]

CONF.register_opts(list_configs)

# Shortcut for the microversion decorator used throughout this module.
supported_versions = api_versioning.VersionedResource.supported_versions
|
||||
|
||||
|
||||
class RequestDeserializer(api_versioning.VersionedResource,
                          wsgi.JSONRequestDeserializer):
    """Glare deserializer for incoming webob requests.

    Deserializer checks and converts incoming request into a bunch of Glare
    primitives. So other service components don't work with requests at all.
    Deserializer also performs primary API validation without any knowledge
    about concrete artifact type structure.
    """

    @staticmethod
    def _get_content_type(req, expected=None):
        """Determine content type of the request body.

        :param req: incoming request
        :param expected: optional list of acceptable content types
        :raises exc.BadRequest: if no Content-Type header present
        :raises exc.UnsupportedMediaType: if not among *expected*
        """
        if "Content-Type" not in req.headers:
            msg = _("Content-Type must be specified.")
            LOG.error(msg)
            raise exc.BadRequest(msg)

        content_type = req.content_type
        if expected is not None and content_type not in expected:
            msg = (_('Invalid content type: %(ct)s. Expected: %(exp)s') %
                   {'ct': content_type, 'exp': ', '.join(expected)})
            raise exc.UnsupportedMediaType(message=msg)

        return content_type

    def _get_request_body(self, req):
        """Get request json body and convert it to python structures."""
        return self.from_json(req.body)

    @supported_versions(min_ver='1.0')
    def create(self, req):
        """Deserialize an artifact-creation request.

        :return: dict with 'values' - the json body of the request
        :raises exc.BadRequest: if the body is not a json dictionary
        """
        self._get_content_type(req, expected=['application/json'])
        body = self._get_request_body(req)
        if not isinstance(body, dict):
            msg = _("Dictionary expected as body value. Got %s.") % type(body)
            raise exc.BadRequest(msg)
        return {'values': body}

    @supported_versions(min_ver='1.0')
    def list(self, req):
        """Deserialize an artifact list request into engine query params.

        Recognized query parameters: marker, limit, sort and
        version=latest; anything else becomes a (name, value) filter.
        """
        params = req.params.copy()
        marker = params.pop('marker', None)
        query_params = {}
        # step 1 - apply marker to query if exists
        if marker is not None:
            query_params['marker'] = marker

        # step 2 - apply limit (if exists OR setup default limit)
        limit = params.pop('limit', CONF.default_api_limit)
        try:
            limit = int(limit)
        except ValueError:
            msg = _("Limit param must be an integer.")
            raise exc.BadRequest(message=msg)
        if limit < 0:
            # NOTE(review): limit == 0 is accepted here although the
            # message says "positive" - confirm intent.
            msg = _("Limit param must be positive.")
            raise exc.BadRequest(message=msg)
        # Cap the requested limit at the configured maximum.
        query_params['limit'] = min(CONF.max_api_limit, limit)

        # step 3 - parse sort parameters
        if 'sort' in params:
            sort = []
            for sort_param in params.pop('sort').strip().split(','):
                key, _sep, direction = sort_param.partition(':')
                if direction and direction not in ('asc', 'desc'):
                    raise exc.BadRequest('Sort direction must be one of '
                                         '["asc", "desc"]. Got %s direction'
                                         % direction)
                # A missing direction defaults to 'desc'.
                sort.append((key, direction or 'desc'))
            query_params['sort'] = sort

        # step 4 - parse filter parameters
        filters = []
        for fname, fval in params.items():
            if fname == 'version' and fval == 'latest':
                query_params['latest'] = True
            else:
                filters.append((fname, fval))

        query_params['filters'] = filters
        return query_params

    @supported_versions(min_ver='1.0')
    def update(self, req):
        """Deserialize a json-patch update request.

        :return: dict with 'patch' - validated jsonpatch.JsonPatch object
        :raises exc.BadRequest: if the patch body is malformed
        """
        self._get_content_type(
            req, expected=['application/json-patch+json'])
        body = self._get_request_body(req)
        patch = jsonpatch.JsonPatch(body)
        try:
            # Initially patch object doesn't validate input. It's only checked
            # when we call get operation on each method
            tuple(map(patch._get_operation, patch.patch))
        except (jsonpatch.InvalidJsonPatch, TypeError, AttributeError,
                jsonpatch.JsonPointerException):
            msg = _("Json Patch body is malformed")
            raise exc.BadRequest(msg)
        return {'patch': patch}

    @supported_versions(min_ver='1.0')
    def upload_blob(self, req):
        """Deserialize a blob upload request.

        The body is either a custom-location json document (which must
        contain 'url' and 'md5' keys) or a raw stream to be stored.

        :return: dict with 'data' and 'content_type'
        """
        content_type = self._get_content_type(req)
        if content_type == ('application/vnd+openstack.glare-custom-location'
                            '+json'):
            data = self._get_request_body(req)
            if 'url' not in data:
                msg = _("url is required when specifying external location. "
                        "Cannot find 'url' in request body: %s") % str(data)
                raise exc.BadRequest(msg)
            if 'md5' not in data:
                msg = _("Incorrect blob metadata. MD5 must be specified "
                        "for external location in artifact blob.")
                raise exc.BadRequest(msg)
        else:
            # Raw upload: hand the body stream through without reading it.
            data = req.body_file

        if self.is_valid_encoding(req) and self.is_valid_method(req):
            req.is_body_readable = True

        return {'data': data, 'content_type': content_type}
|
||||
|
||||
|
||||
def log_request_progress(f):
    """Decorator logging deserialization and completion of an API call.

    Wraps a controller method ``f(self, req, *args, **kwargs)``: logs that
    the request was successfully deserialized before invoking it, and logs
    success after it returns.

    :param f: controller method to wrap
    :return: wrapped method with ``f``'s metadata preserved
    """
    import functools

    # functools.wraps preserves f's __name__/__doc__ on the wrapper so
    # introspection (and routing/debugging tools) still see the real
    # method instead of 'log_decorator'.
    @functools.wraps(f)
    def log_decorator(self, req, *args, **kwargs):
        LOG.debug("Request %(request_id)s for %(api_method)s successfully "
                  "deserialized. Pass request parameters to Engine",
                  {'request_id': req.context.request_id,
                   'api_method': f.__name__})
        result = f(self, req, *args, **kwargs)
        LOG.info(
            "Request %(request_id)s for artifact %(api_method)s "
            "successfully executed.", {'request_id': req.context.request_id,
                                       'api_method': f.__name__})
        return result
    return log_decorator
|
||||
|
||||
|
||||
class ArtifactsController(api_versioning.VersionedResource):
|
||||
"""API controller for Glare Artifacts.
|
||||
|
||||
Artifact Controller prepares incoming data for Glare Engine and redirects
|
||||
data to the appropriate engine method. Once the response data is returned
|
||||
from the engine Controller passes it next to Response Serializer.
|
||||
"""
|
||||
|
||||
    def __init__(self):
        # Single Engine instance shared by all handlers of this controller.
        self.engine = engine.Engine()
|
||||
|
||||
    @supported_versions(min_ver='1.0')
    @log_request_progress
    def list_type_schemas(self, req):
        """List of detailed descriptions of enabled artifact types.

        :param req: user request
        :return: list of json-schemas of all enabled artifact types.
        """
        # No type name passed - the engine returns schemas for all types.
        return self.engine.show_type_schemas(req.context)
|
||||
|
||||
@supported_versions(min_ver='1.0')
|
||||
@log_request_progress
|
||||
def show_type_schema(self, req, type_name):
|
||||
"""Get detailed artifact type description.
|
||||
|
||||
:param req: user request
|
||||
:param type_name: artifact type name
|
||||
:return: json-schema representation of artifact type
|
||||
"""
|
||||
type_schema = self.engine.show_type_schemas(req.context, type_name)
|
||||
return {type_name: type_schema}
|
||||
|
||||
@supported_versions(min_ver='1.0')
|
||||
@log_request_progress
|
||||
def create(self, req, type_name, values):
|
||||
"""Create artifact record in Glare.
|
||||
|
||||
:param req: user request
|
||||
:param type_name: artifact type name
|
||||
:param values: dict with artifact fields
|
||||
:return: definition of created artifact
|
||||
"""
|
||||
if req.context.tenant is None or req.context.read_only:
|
||||
msg = _("It's forbidden to anonymous users to create artifacts.")
|
||||
raise exc.Forbidden(msg)
|
||||
if not values.get('name'):
|
||||
msg = _("Name must be specified at creation.")
|
||||
raise exc.BadRequest(msg)
|
||||
for field in ('visibility', 'status'):
|
||||
if field in values:
|
||||
msg = _("%s is not allowed in a request at creation.") % field
|
||||
raise exc.BadRequest(msg)
|
||||
return self.engine.create(req.context, type_name, values)
|
||||
|
||||
@supported_versions(min_ver='1.0')
|
||||
@log_request_progress
|
||||
def update(self, req, type_name, artifact_id, patch):
|
||||
"""Update artifact record in Glare.
|
||||
|
||||
:param req: User request
|
||||
:param type_name: Artifact type name
|
||||
:param artifact_id: id of artifact to update
|
||||
:param patch: json patch with artifact changes
|
||||
:return: definition of updated artifact
|
||||
"""
|
||||
return self.engine.save(req.context, type_name, artifact_id, patch)
|
||||
|
||||
@supported_versions(min_ver='1.0')
|
||||
@log_request_progress
|
||||
def delete(self, req, type_name, artifact_id):
|
||||
"""Delete artifact from Glare.
|
||||
|
||||
:param req: User request
|
||||
:param type_name: Artifact type name
|
||||
:param artifact_id: id of artifact to delete
|
||||
"""
|
||||
return self.engine.delete(req.context, type_name, artifact_id)
|
||||
|
||||
@supported_versions(min_ver='1.0')
|
||||
@log_request_progress
|
||||
def show(self, req, type_name, artifact_id):
|
||||
"""Show detailed artifact info.
|
||||
|
||||
:param req: User request
|
||||
:param type_name: Artifact type name
|
||||
:param artifact_id: id of artifact to show
|
||||
:return: definition of requested artifact
|
||||
"""
|
||||
return self.engine.show(req.context, type_name, artifact_id)
|
||||
|
||||
@supported_versions(min_ver='1.0')
|
||||
@log_request_progress
|
||||
def list(self, req, type_name, filters=None, marker=None, limit=None,
|
||||
sort=None, latest=False):
|
||||
"""List available artifacts.
|
||||
|
||||
:param req: User request
|
||||
:param type_name: Artifact type name
|
||||
:param filters: filters that need to be applied to artifact
|
||||
:param marker: the artifact that considered as begin of the list
|
||||
so all artifacts before marker (including marker itself) will not be
|
||||
added to artifact list
|
||||
:param limit: maximum number of items in list
|
||||
:param sort: sorting options
|
||||
:param latest: flag that indicates, that only artifacts with highest
|
||||
versions should be returned in output
|
||||
:return: list of requested artifact definitions
|
||||
"""
|
||||
artifacts = self.engine.list(req.context, type_name, filters, marker,
|
||||
limit, sort, latest)
|
||||
result = {'artifacts': artifacts,
|
||||
'type_name': type_name}
|
||||
if len(artifacts) != 0 and len(artifacts) == limit:
|
||||
result['next_marker'] = artifacts[-1]['id']
|
||||
return result
|
||||
|
||||
@supported_versions(min_ver='1.0')
|
||||
@log_request_progress
|
||||
def upload_blob(self, req, type_name, artifact_id, blob_path, data,
|
||||
content_type):
|
||||
"""Upload blob into Glare repo.
|
||||
|
||||
:param req: User request
|
||||
:param type_name: Artifact type name
|
||||
:param artifact_id: id of artifact where to perform upload
|
||||
:param blob_path: path to artifact blob
|
||||
:param data: blob payload
|
||||
:param content_type: data content-type
|
||||
:return: definition of requested artifact with uploaded blob
|
||||
"""
|
||||
field_name, _sep, blob_key = blob_path.partition('/')
|
||||
if not blob_key:
|
||||
blob_key = None
|
||||
if content_type == ('application/vnd+openstack.glare-custom-location'
|
||||
'+json'):
|
||||
url = data.pop('url')
|
||||
return self.engine.add_blob_location(
|
||||
req.context, type_name, artifact_id, field_name, url, data,
|
||||
blob_key)
|
||||
else:
|
||||
return self.engine.upload_blob(
|
||||
req.context, type_name, artifact_id, field_name, data,
|
||||
content_type, blob_key)
|
||||
|
||||
@supported_versions(min_ver='1.0')
|
||||
@log_request_progress
|
||||
def download_blob(self, req, type_name, artifact_id, blob_path):
|
||||
"""Download blob data from Artifact.
|
||||
|
||||
:param req: User request
|
||||
:param type_name: artifact type name
|
||||
:param artifact_id: id of artifact from where to perform download
|
||||
:param blob_path: path to artifact blob
|
||||
:return: requested blob data
|
||||
"""
|
||||
field_name, _sep, blob_key = blob_path.partition('/')
|
||||
if not blob_key:
|
||||
blob_key = None
|
||||
data, meta = self.engine.download_blob(
|
||||
req.context, type_name, artifact_id, field_name, blob_key)
|
||||
result = {'data': data, 'meta': meta}
|
||||
return result
|
||||
|
||||
|
||||
class ResponseSerializer(api_versioning.VersionedResource,
                         wsgi.JSONResponseSerializer):
    """Glare serializer for outgoing responses.

    Converts data received from the engine into WSGI responses, setting
    the response status and content type the API declares for each
    action.
    """

    @staticmethod
    def _prepare_json_response(response, result,
                               content_type='application/json'):
        # ensure_ascii=False keeps non-ascii characters readable in the
        # serialized payload.
        serialized = json.dumps(result, ensure_ascii=False)
        response.text = six.text_type(serialized)
        response.content_type = content_type + '; charset=UTF-8'

    def list_type_schemas(self, response, type_schemas):
        self._prepare_json_response(
            response, {'schemas': type_schemas},
            content_type='application/schema+json')

    def show_type_schema(self, response, type_schema):
        self._prepare_json_response(
            response, {'schemas': type_schema},
            content_type='application/schema+json')

    @supported_versions(min_ver='1.0')
    def list_schemas(self, response, type_list):
        self._prepare_json_response(response, {'types': type_list})

    @supported_versions(min_ver='1.0')
    def create(self, response, artifact):
        self._prepare_json_response(response, artifact)
        response.status_int = http_client.CREATED

    @supported_versions(min_ver='1.0')
    def show(self, response, artifact):
        self._prepare_json_response(response, artifact)

    @supported_versions(min_ver='1.0')
    def update(self, response, artifact):
        self._prepare_json_response(response, artifact)

    @supported_versions(min_ver='1.0')
    def list(self, response, af_list):
        # Pagination links are rebuilt from the request's query string,
        # with 'marker' swapped for the next page's marker.
        params = dict(response.request.params)
        params.pop('marker', None)

        encoded = {key: encodeutils.safe_encode(value)
                   for key, value in params.items()}
        query = urlparse.urlencode(encoded)

        type_name = af_list['type_name']
        body = {
            type_name: af_list['artifacts'],
            'first': '/artifacts/%s' % type_name,
            'schema': '/schemas/%s' % type_name,
        }
        if query:
            body['first'] = '%s?%s' % (body['first'], query)
        if 'next_marker' in af_list:
            params['marker'] = af_list['next_marker']
            body['next'] = '/artifacts/%s?%s' % (type_name,
                                                 urlparse.urlencode(params))
        response.unicode_body = six.text_type(
            json.dumps(body, ensure_ascii=False))
        response.content_type = 'application/json'

    @supported_versions(min_ver='1.0')
    def delete(self, response, result):
        response.status_int = http_client.NO_CONTENT

    @supported_versions(min_ver='1.0')
    def upload_blob(self, response, artifact):
        self._prepare_json_response(response, artifact)

    @staticmethod
    def _serialize_blob(response, result):
        # Internal blob: stream the data and expose its checksums.
        data, meta = result['data'], result['meta']
        response.app_iter = iter(data)
        response.headers['Content-Type'] = meta['content_type']
        response.headers['Content-MD5'] = meta['md5']
        response.headers['X-Openstack-Glare-Content-SHA1'] = meta['sha1']
        response.headers['X-Openstack-Glare-Content-SHA256'] = meta['sha256']
        response.content_length = str(meta['size'])

    @staticmethod
    def _serialize_location(response, result):
        # External blob: redirect the client to the stored location.
        data, meta = result['data'], result['meta']
        response.headers['Content-MD5'] = meta['md5']
        response.headers['X-Openstack-Glare-Content-SHA1'] = meta['sha1']
        response.headers['X-Openstack-Glare-Content-SHA256'] = meta['sha256']
        response.location = data['url']
        response.content_type = 'application/json'
        response.status = http_client.MOVED_PERMANENTLY
        response.content_length = 0

    @supported_versions(min_ver='1.0')
    def download_blob(self, response, result):
        if result['meta']['external']:
            self._serialize_location(response, result)
        else:
            self._serialize_blob(response, result)
|
||||
|
||||
|
||||
def create_resource():
    """Artifact resource factory method."""
    return wsgi.Resource(ArtifactsController(),
                         RequestDeserializer(),
                         ResponseSerializer())
|
||||
@@ -1,100 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from glare.api.v1 import resource
|
||||
from glare.common import wsgi
|
||||
|
||||
|
||||
class API(wsgi.Router):
    """WSGI router for Glare v1 API requests.

    API Router redirects incoming requests to appropriate WSGI resource method.
    """

    def __init__(self, mapper):

        glare_resource = resource.create_resource()
        # NOTE(review): presumably RejectMethodController answers known
        # paths hit with a disallowed verb using the allowed_methods
        # value — confirm in glare.common.wsgi.
        reject_method_resource = wsgi.Resource(wsgi.RejectMethodController())

        # ---schemas---
        # body_reject=True refuses requests carrying a body on
        # body-less actions.
        mapper.connect('/schemas',
                       controller=glare_resource,
                       action='list_type_schemas',
                       conditions={'method': ['GET']},
                       body_reject=True)
        mapper.connect('/schemas',
                       controller=reject_method_resource,
                       action='reject',
                       allowed_methods='GET')

        mapper.connect('/schemas/{type_name}',
                       controller=glare_resource,
                       action='show_type_schema',
                       conditions={'method': ['GET']},
                       body_reject=True)
        mapper.connect('/schemas/{type_name}',
                       controller=reject_method_resource,
                       action='reject',
                       allowed_methods='GET')

        # ---artifacts---
        mapper.connect('/artifacts/{type_name}',
                       controller=glare_resource,
                       action='list',
                       conditions={'method': ['GET']},
                       body_reject=True)
        mapper.connect('/artifacts/{type_name}',
                       controller=glare_resource,
                       action='create',
                       conditions={'method': ['POST']})
        mapper.connect('/artifacts/{type_name}',
                       controller=reject_method_resource,
                       action='reject',
                       allowed_methods='GET, POST')

        mapper.connect('/artifacts/{type_name}/{artifact_id}',
                       controller=glare_resource,
                       action='update',
                       conditions={'method': ['PATCH']})
        mapper.connect('/artifacts/{type_name}/{artifact_id}',
                       controller=glare_resource,
                       action='show',
                       conditions={'method': ['GET']},
                       body_reject=True)
        mapper.connect('/artifacts/{type_name}/{artifact_id}',
                       controller=glare_resource,
                       action='delete',
                       conditions={'method': ['DELETE']},
                       body_reject=True)
        mapper.connect('/artifacts/{type_name}/{artifact_id}',
                       controller=reject_method_resource,
                       action='reject',
                       allowed_methods='GET, PATCH, DELETE')

        # ---blobs---
        # {blob_path:.*?} captures nested blob paths (e.g. "field/key")
        # as a single route parameter.
        mapper.connect('/artifacts/{type_name}/{artifact_id}/{blob_path:.*?}',
                       controller=glare_resource,
                       action='download_blob',
                       conditions={'method': ['GET']},
                       body_reject=True)
        mapper.connect('/artifacts/{type_name}/{artifact_id}/{blob_path:.*?}',
                       controller=glare_resource,
                       action='upload_blob',
                       conditions={'method': ['PUT']})
        mapper.connect('/artifacts/{type_name}/{artifact_id}/{blob_path:.*?}',
                       controller=reject_method_resource,
                       action='reject',
                       allowed_methods='GET, PUT')

        super(API, self).__init__(mapper)
|
||||
@@ -1,96 +0,0 @@
|
||||
# Copyright 2012 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_serialization import jsonutils
|
||||
from six.moves import http_client
|
||||
import webob.dec
|
||||
|
||||
from glare.api.v1 import api_version_request
|
||||
from glare.i18n import _
|
||||
|
||||
|
||||
versions_opts = [
|
||||
cfg.StrOpt('public_endpoint',
|
||||
help=_("""
|
||||
Public url endpoint to use for Glare versions response.
|
||||
|
||||
This is the public url endpoint that will appear in the Glare
|
||||
"versions" response. If no value is specified, the endpoint that is
|
||||
displayed in the version's response is that of the host running the
|
||||
API service. Change the endpoint to represent the proxy URL if the
|
||||
API service is running behind a proxy. If the service is running
|
||||
behind a load balancer, add the load balancer's URL for this value.
|
||||
|
||||
Services which consume this:
|
||||
* glare
|
||||
|
||||
Possible values:
|
||||
* None
|
||||
* Proxy URL
|
||||
* Load balancer URL
|
||||
|
||||
Related options:
|
||||
* None
|
||||
|
||||
""")),
|
||||
]
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.register_opts(versions_opts)
|
||||
|
||||
|
||||
class Controller(object):

    """A controller that reports which API versions are supported."""

    @staticmethod
    def index(req, is_multi):
        """Respond to a request for all OpenStack API versions.

        :param is_multi: defines if multiple choices should be response status
        or not
        :param req: user request object
        :return: list of supported API versions
        """
        def build_version_object(max_version, min_version, status, path=None):
            # Prefer the configured public endpoint; fall back to the
            # host that actually served the request.
            endpoint = CONF.public_endpoint or req.host_url
            href = '%s/%s/' % (endpoint, path) if path else '%s/' % endpoint
            return {
                'id': 'v%s' % max_version,
                'links': [
                    {
                        'rel': 'self',
                        'href': href,
                    },
                ],
                'status': status,
                'min_version': min_version,
                'version': max_version
            }

        newest = api_version_request.APIVersionRequest.max_version()
        oldest = api_version_request.APIVersionRequest.min_version()
        version_objs = [build_version_object(newest.get_string(),
                                             oldest.get_string(),
                                             'EXPERIMENTAL')]
        return_status = (http_client.MULTIPLE_CHOICES if is_multi else
                         http_client.OK)
        response = webob.Response(request=req,
                                  status=return_status,
                                  content_type='application/json')
        response.body = jsonutils.dump_as_bytes(dict(versions=version_objs))
        return response
|
||||
@@ -1,53 +0,0 @@
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import oslo_utils.strutils as strutils
|
||||
|
||||
from glare import i18n
|
||||
|
||||
try:
|
||||
import dns # noqa
|
||||
except ImportError:
|
||||
dnspython_installed = False
|
||||
else:
|
||||
dnspython_installed = True
|
||||
|
||||
|
||||
def fix_greendns_ipv6():
    """Disable eventlet's greendns resolver, which breaks with IPv6.

    When dnspython is importable, eventlet monkeypatches
    socket.getaddrinfo() with an implementation that does not work for
    IPv6. Setting EVENTLET_NO_GREENDNS before eventlet is imported
    prevents that; if eventlet was already imported without the flag,
    the situation cannot be repaired, so it is treated as a fatal error.
    """
    if not dnspython_installed:
        return
    nogreendns = 'EVENTLET_NO_GREENDNS'
    flag = os.environ.get(nogreendns, '')
    if 'eventlet' in sys.modules and not strutils.bool_from_string(flag):
        msg = i18n._("It appears that the eventlet module has been "
                     "imported prior to setting %s='yes'. It is currently "
                     "necessary to disable eventlet.greendns "
                     "if using ipv6 since eventlet.greendns currently "
                     "breaks with ipv6 addresses. Please ensure that "
                     "eventlet is not imported prior to this being set.")
        raise ImportError(msg % nogreendns)

    os.environ[nogreendns] = 'yes'
|
||||
|
||||
|
||||
i18n.enable_lazy()
|
||||
fix_greendns_ipv6()
|
||||
@@ -1,90 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
"""
|
||||
Glare (Glare Artifact Repository) API service.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import eventlet
|
||||
from oslo_utils import encodeutils
|
||||
|
||||
eventlet.patcher.monkey_patch(all=False, socket=True, time=True,
|
||||
select=True, thread=True, os=True)
|
||||
|
||||
# If ../glare/__init__.py exists, add ../ to Python search path, so that
|
||||
# it will override what happens to be installed in /usr/(local/)lib/python...
|
||||
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
|
||||
os.pardir,
|
||||
os.pardir))
|
||||
if os.path.exists(os.path.join(possible_topdir, 'glare', '__init__.py')):
|
||||
sys.path.insert(0, possible_topdir)
|
||||
|
||||
import glance_store
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from osprofiler import initializer
|
||||
|
||||
from glare.common import config
|
||||
from glare.common import exception
|
||||
from glare.common import wsgi
|
||||
from glare import notification
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.import_group("profiler", "glare.common.wsgi")
|
||||
logging.register_options(CONF)
|
||||
|
||||
KNOWN_EXCEPTIONS = (RuntimeError,
|
||||
exception.WorkerCreationFailure,
|
||||
glance_store.exceptions.BadStoreConfiguration)
|
||||
|
||||
|
||||
def fail(e):
    """Print the startup error and exit with a code identifying it.

    :param e: exception caught at service startup; an instance of one of
        ``KNOWN_EXCEPTIONS`` (or, since ``except KNOWN_EXCEPTIONS`` also
        catches subclasses, a subclass of one of them)

    The exit code is 1 + the index of the first matching entry in
    ``KNOWN_EXCEPTIONS``. An ``isinstance`` scan is used instead of
    ``KNOWN_EXCEPTIONS.index(type(e))`` because the exact-type lookup
    raises ValueError for subclasses; anything unmatched falls back to
    the generic code 1.
    """
    return_code = 1
    for position, exc_type in enumerate(KNOWN_EXCEPTIONS, start=1):
        if isinstance(e, exc_type):
            return_code = position
            break
    sys.stderr.write("ERROR: %s\n" % encodeutils.exception_to_unicode(e))
    sys.exit(return_code)
|
||||
|
||||
|
||||
def main():
    """Entry point of the Glare API service.

    Parses configuration, sets up logging and notifications and
    (optionally) OSProfiler, then starts the WSGI server on port 9494
    by default and blocks until it exits. Known startup failures are
    handed to fail(), which exits with a non-zero code.
    """
    try:
        config.parse_args()
        # Select the eventlet hub before the server spawns green threads.
        wsgi.set_eventlet_hub()
        logging.setup(CONF, 'glare')
        notification.set_defaults()

        if CONF.profiler.enabled:
            initializer.init_from_conf(
                conf=CONF,
                context={},
                project="glare",
                service="api",
                host=CONF.bind_host
            )

        server = wsgi.Server(initialize_glance_store=True)
        server.start(config.load_paste_app('glare-api'), default_port=9494)
        server.wait()
    except KNOWN_EXCEPTIONS as e:
        fail(e)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,80 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_db import options
|
||||
|
||||
from glare.db.migration import migration
|
||||
|
||||
CONF = cfg.CONF
|
||||
options.set_defaults(CONF)
|
||||
|
||||
|
||||
class DBCommand(object):
    """Handlers for the glare database management CLI sub-commands.

    Each method receives the alembic ``config`` object; the remaining
    arguments are read from ``CONF.command``, which the sub-command
    parsers populate.
    """

    def upgrade(self, config):
        # Upgrade the database schema to CONF.command.revision.
        migration.upgrade(CONF.command.revision, config=config)

    def downgrade(self, config):
        # Downgrade the database schema to CONF.command.revision.
        migration.downgrade(CONF.command.revision, config=config)

    def revision(self, config):
        # Create a new migration revision; CONF.command supplies the
        # message and the autogenerate flag.
        migration.revision(CONF.command.message,
                           CONF.command.autogenerate,
                           config=config)

    def stamp(self, config):
        # Record CONF.command.revision as the current database revision.
        migration.stamp(CONF.command.revision, config=config)

    def version(self, config):
        # Print the current database revision to stdout.
        print(migration.version())
|
||||
|
||||
|
||||
def add_command_parsers(subparsers):
    """Register the db-manage sub-commands on *subparsers*.

    Each sub-command is bound, via ``set_defaults(func=...)``, to the
    DBCommand method of the same name.
    """
    command_object = DBCommand()

    # upgrade and downgrade take the same single optional --revision.
    for command_name in ('upgrade', 'downgrade'):
        revision_parser = subparsers.add_parser(command_name)
        revision_parser.set_defaults(func=getattr(command_object,
                                                  command_name))
        revision_parser.add_argument('--revision', nargs='?')

    stamp_parser = subparsers.add_parser('stamp')
    stamp_parser.add_argument('--revision', nargs='?')
    stamp_parser.set_defaults(func=command_object.stamp)

    revision_cmd_parser = subparsers.add_parser('revision')
    revision_cmd_parser.add_argument('-m', '--message')
    revision_cmd_parser.add_argument('--autogenerate', action='store_true')
    revision_cmd_parser.set_defaults(func=command_object.revision)

    version_parser = subparsers.add_parser('version')
    version_parser.set_defaults(func=command_object.version)
|
||||
|
||||
|
||||
command_opt = cfg.SubCommandOpt('command',
|
||||
title='Command',
|
||||
help='Available commands',
|
||||
handler=add_command_parsers)
|
||||
|
||||
CONF.register_cli_opt(command_opt)
|
||||
|
||||
|
||||
def main():
    """glare-db-manage entry point.

    Builds the alembic config, parses the command line into CONF, and
    invokes the handler the chosen sub-command bound via set_defaults.
    """
    config = migration.get_alembic_config()
    CONF(project='glare')
    CONF.command.func(config)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,73 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright 2017 - Nokia Networks
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
Glare Scrub Service
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
# If ../glare/__init__.py exists, add ../ to Python search path, so that
|
||||
# it will override what happens to be installed in /usr/(local/)lib/python...
|
||||
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
|
||||
os.pardir,
|
||||
os.pardir))
|
||||
if os.path.exists(os.path.join(possible_topdir, 'glare', '__init__.py')):
|
||||
sys.path.insert(0, possible_topdir)
|
||||
import eventlet
|
||||
|
||||
import glance_store
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
|
||||
from glare.common import config
|
||||
from glare import scrubber
|
||||
|
||||
eventlet.patcher.monkey_patch(all=False, socket=True, time=True, select=True,
|
||||
thread=True, os=True)
|
||||
|
||||
CONF = cfg.CONF
|
||||
logging.register_options(CONF)
|
||||
CONF.set_default(name='use_stderr', default=True)
|
||||
|
||||
|
||||
def main():
    """Entry point of the Glare scrubber service.

    Registers the scrubber options, initialises configuration, logging
    and the glance_store backends, then either runs the scrubber once
    or, in daemon mode, keeps it running on a wakeup interval. Runtime
    errors terminate the process with a non-zero exit status.
    """
    CONF.register_cli_opts(scrubber.scrubber_cmd_cli_opts, group='scrubber')
    CONF.register_opts(scrubber.scrubber_cmd_opts, group='scrubber')

    try:
        config.parse_args()
        logging.setup(CONF, 'glare')

        # Stores must be registered and verified before scrubbing can
        # touch any blob data.
        glance_store.register_opts(config.CONF)
        glance_store.create_stores(config.CONF)
        glance_store.verify_default_store()

        app = scrubber.Scrubber()

        if CONF.scrubber.daemon:
            # Daemon mode: re-run the scrubber every wakeup_time seconds.
            server = scrubber.Daemon(CONF.scrubber.wakeup_time)
            server.start(app)
            server.wait()
        else:
            # One-shot mode.
            app.run()
    except RuntimeError as e:
        sys.exit("ERROR: %s" % e)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,155 +0,0 @@
|
||||
# Copyright 2011 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Routines for configuring Glare.
|
||||
"""
|
||||
|
||||
import logging.config
|
||||
import logging.handlers
|
||||
import os
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_policy import policy
|
||||
from paste import deploy
|
||||
|
||||
from glare.i18n import _
|
||||
|
||||
paste_deploy_opts = [
|
||||
cfg.StrOpt('flavor',
|
||||
help=_('Partial name of a pipeline in your paste configuration '
|
||||
'file with the service name removed. For example, if '
|
||||
'your paste section name is '
|
||||
'[pipeline:glare-keystone] use the value '
|
||||
'"keystone"')),
|
||||
cfg.StrOpt('config_file',
|
||||
help=_('Name of the paste configuration file.')),
|
||||
]
|
||||
|
||||
common_opts = [
|
||||
cfg.StrOpt('digest_algorithm',
|
||||
default='sha256',
|
||||
help=_("""
|
||||
Digest algorithm to use for digital signature.
|
||||
|
||||
Provide a string value representing the digest algorithm to
|
||||
use for generating digital signatures. By default, ``sha256``
|
||||
is used.
|
||||
|
||||
To get a list of the available algorithms supported by the version
|
||||
of OpenSSL on your platform, run the command:
|
||||
``openssl list-message-digest-algorithms``.
|
||||
Examples are 'sha1', 'sha256', and 'sha512'.
|
||||
|
||||
Possible values:
|
||||
* An OpenSSL message digest algorithm identifier
|
||||
|
||||
Relation options:
|
||||
* None
|
||||
|
||||
""")),
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.register_opts(paste_deploy_opts, group='paste_deploy')
|
||||
CONF.register_opts(common_opts)
|
||||
policy.Enforcer(CONF)
|
||||
|
||||
|
||||
def parse_args(args=None, usage=None, default_config_files=None):
    """Parse command line and config file options into the global CONF.

    :param args: argument list to parse; when None, oslo.config falls
        back to the process command line
    :param usage: usage string displayed in help output
    :param default_config_files: config files to read when none are
        given on the command line
    """
    CONF(args=args,
         project='glare',
         usage=usage,
         default_config_files=default_config_files)
|
||||
|
||||
|
||||
def _get_deployment_flavor(flavor=None):
|
||||
"""Retrieve the paste_deploy.flavor config item, formatted appropriately
|
||||
for appending to the application name.
|
||||
|
||||
:param flavor: if specified, use this setting rather than the
|
||||
paste_deploy.flavor configuration setting
|
||||
"""
|
||||
if not flavor:
|
||||
flavor = CONF.paste_deploy.flavor
|
||||
return '' if not flavor else ('-' + flavor)
|
||||
|
||||
|
||||
def _get_paste_config_path():
    """Locate the paste configuration file for the current program.

    The paste config is assumed to be named after the last config file,
    with ``.conf`` replaced by ``-paste.ini``; when no config file was
    given, the program name plus ``-paste.ini`` is used instead.
    """
    paste_suffix = '-paste.ini'
    if CONF.config_file:
        # Derive the paste file name from the last config file.
        candidate = CONF.config_file[-1].replace('.conf', paste_suffix)
    else:
        candidate = CONF.prog + paste_suffix
    return CONF.find_file(os.path.basename(candidate))
|
||||
|
||||
|
||||
def _get_deployment_config_file():
    """Retrieve the deployment_config_file config item, formatted as an
    absolute pathname.

    :raises RuntimeError: when no paste config file can be located
    """
    # Explicit paste_deploy.config_file wins; otherwise search for a
    # conventionally named paste file next to the config files.
    path = CONF.paste_deploy.config_file or _get_paste_config_path()
    if not path:
        raise RuntimeError(
            _("Unable to locate paste config file for %s.") % CONF.prog)
    return os.path.abspath(path)
|
||||
|
||||
|
||||
def load_paste_app(app_name, flavor=None, conf_file=None):
    """Build and return a WSGI app from a paste config file.

    We assume the last config file specified in the supplied ConfigOpts
    object is the paste config file, if conf_file is None.

    :param app_name: name of the application to load
    :param flavor: name of the variant of the application to load
    :param conf_file: path to the paste config file

    :raises: RuntimeError when config file cannot be located or application
        cannot be loaded from config file
    """
    # The deployment flavor selects the appropriate paste pipeline variant.
    app_name += _get_deployment_flavor(flavor)

    conf_file = conf_file or _get_deployment_config_file()

    logger = logging.getLogger(__name__)
    try:
        logger.debug("Loading %(app_name)s from %(conf_file)s",
                     {'conf_file': conf_file, 'app_name': app_name})
        app = deploy.loadapp("config:%s" % conf_file, name=app_name)

        # Log the options used when starting if we're in debug mode...
        if CONF.debug:
            CONF.log_opt_values(logger, logging.DEBUG)

        return app
    except (LookupError, ImportError) as e:
        msg = (_("Unable to load %(app_name)s from "
                 "configuration file %(conf_file)s."
                 "\nGot: %(e)r") % {'app_name': app_name,
                                    'conf_file': conf_file,
                                    'e': e})
        logger.error(msg)
        raise RuntimeError(msg)
|
||||
@@ -1,166 +0,0 @@
|
||||
# Copyright 2016 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from oslo_log import log as logging
|
||||
import six
|
||||
|
||||
from glare.i18n import _
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GlareException(Exception):
    """Base Glare Exception class.

    To correctly use this class, inherit from it and define
    a 'message' property. That message will get printf'd
    with the keyword arguments provided to the constructor.
    """
    message = _("An unknown exception occurred")

    def __init__(self, message=None, **kwargs):
        # An explicit message overrides the class-level template.
        if message:
            self.message = message
        self.kwargs = kwargs
        if kwargs:
            self.message = self.message % kwargs
        LOG.error(self.message)
        super(GlareException, self).__init__(self.message)

    def __unicode__(self):
        # Python 2 compatibility hook.
        return six.text_type(self.message)
|
||||
|
||||
|
||||
class BadRequest(GlareException):
    message = _("Bad request")


class InvalidParameterValue(BadRequest):
    message = _("Invalid filter value ")


class InvalidFilterOperatorValue(BadRequest):
    # BUG FIX: this attribute was named 'msg', which GlareException.__init__
    # never reads, so the generic "An unknown exception occurred" text was
    # raised instead. The base class contract requires 'message'.
    message = _("Unable to filter by unknown operator.")


class InvalidVersion(GlareException):
    message = _("Provided version is invalid")


class NotAcceptable(GlareException):
    message = _("Not acceptable")


class InvalidGlobalAPIVersion(NotAcceptable):
    message = _("Version %(req_ver)s is not supported by the API. Minimum "
                "is %(min_ver)s and maximum is %(max_ver)s.")


class VersionNotFoundForAPIMethod(GlareException):
    message = _("API version %(version)s is not supported on this method.")


class ApiVersionsIntersect(GlareException):
    message = _("Version of %(name)s %(min_ver)s %(max_ver)s intersects "
                "with another versions.")


class Unauthorized(GlareException):
    message = _('You are not authenticated')


class Forbidden(GlareException):
    message = _("You are not authorized to complete this action.")


class PolicyException(Forbidden):
    message = _("Policy check for %(policy_name)s "
                "failed with user credentials.")


class NotFound(GlareException):
    message = _("An object with the specified identifier was not found.")


class TypeNotFound(NotFound):
    message = _("Glare type with name '%(name)s' was not found.")


class IncorrectArtifactType(GlareException):
    message = _("Artifact type is incorrect: %(explanation)s")


class ArtifactNotFound(NotFound):
    message = _("Artifact with type name '%(type_name)s' and id '%(id)s' was "
                "not found.")


class RequestTimeout(GlareException):
    message = _("The client did not produce a request within the time "
                "that the server was prepared to wait.")


class Conflict(GlareException):
    message = _("The request could not be completed due to a conflict "
                "with the current state of the resource.")


class Gone(GlareException):
    message = _("The requested resource is no longer available at the "
                "server and no forwarding address is known.")


class PreconditionFailed(GlareException):
    message = _("The precondition given in one or more of the request-header "
                "fields evaluated to false when it was tested on the server.")


class RequestEntityTooLarge(GlareException):
    message = _("The server is refusing to process a request because the "
                "request entity is larger than the server is willing or "
                "able to process.")


class RequestRangeNotSatisfiable(GlareException):
    message = _("The request included a Range request-header field, and none "
                "of the range-specifier values in this field overlap the "
                "current extent of the selected resource, and the request "
                "did not include an If-Range request-header field.")


class Locked(GlareException):
    message = _('The resource is locked.')


class FailedDependency(GlareException):
    message = _('The method could not be performed because the requested '
                'action depended on another action and that action failed.')


class UnsupportedMediaType(GlareException):
    message = _("Unsupported media type.")


class SIGHUPInterrupt(GlareException):
    message = _("System SIGHUP signal received.")


class WorkerCreationFailure(GlareException):
    message = _("Server worker creation failed: %(reason)s.")


class DBNotAllowed(GlareException):
    # BUG FIX: this attribute was named 'msg_fmt'; the base class only
    # formats and logs 'message', so the intended text was never used.
    message = _('This operation is not allowed with current DB')
|
||||
@@ -1,128 +0,0 @@
|
||||
# Copyright 2011-2016 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Glare policy operations inspired by Nova implementation."""
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_policy import policy
|
||||
|
||||
from glare.common import exception
|
||||
|
||||
CONF = cfg.CONF
LOG = logging.getLogger(__name__)

# Singleton oslo.policy Enforcer, created lazily by init().
_ENFORCER = None
|
||||
|
||||
|
||||
# Default policy rules for the artifact API. 'context_is_admin' and
# 'admin_or_owner' are base rules referenced by the artifact:* rules below.
artifact_policy_rules = [
    policy.RuleDefault('context_is_admin', 'role:admin'),
    policy.RuleDefault('admin_or_owner',
                       'is_admin:True or project_id:%(owner)s'),
    policy.RuleDefault("artifact:type_list", "",
                       "Policy to request list of artifact types"),
    policy.RuleDefault("artifact:create", "", "Policy to create artifact."),
    policy.RuleDefault("artifact:update_public",
                       "'public':%(visibility)s and rule:context_is_admin "
                       "or not 'public':%(visibility)s",
                       "Policy to update public artifact"),
    policy.RuleDefault("artifact:update", "rule:admin_or_owner and "
                                          "rule:artifact:update_public",
                       "Policy to update artifact"),
    policy.RuleDefault("artifact:activate", "rule:admin_or_owner",
                       "Policy to activate artifact"),
    policy.RuleDefault("artifact:reactivate", "rule:context_is_admin",
                       "Policy to reactivate artifact"),
    # NOTE(review): description below looks copy-pasted from "update";
    # presumably it should read "Policy to deactivate artifact" — confirm
    # before changing the user-visible help text.
    policy.RuleDefault("artifact:deactivate", "rule:context_is_admin",
                       "Policy to update artifact"),
    policy.RuleDefault("artifact:publish", "rule:context_is_admin",
                       "Policy to publish artifact"),
    policy.RuleDefault("artifact:get", "",
                       "Policy to get artifact definition"),
    policy.RuleDefault("artifact:list", "",
                       "Policy to list artifacts"),
    policy.RuleDefault("artifact:delete_public",
                       "'public':%(visibility)s and rule:context_is_admin "
                       "or not 'public':%(visibility)s",
                       "Policy to delete public artifacts"),
    policy.RuleDefault("artifact:delete_deactivated",
                       "'deactivated':%(status)s and rule:context_is_admin "
                       "or not 'deactivated':%(status)s",
                       "Policy to delete deactivated artifacts"),
    policy.RuleDefault("artifact:delete", "rule:admin_or_owner and "
                                          "rule:artifact:delete_public and "
                                          "rule:artifact:delete_deactivated",
                       "Policy to delete artifacts"),
    policy.RuleDefault("artifact:set_location", "rule:admin_or_owner",
                       "Policy to set custom location for artifact"),
    policy.RuleDefault("artifact:upload", "rule:admin_or_owner",
                       "Policy to upload blob for artifact"),
    policy.RuleDefault("artifact:download_deactivated",
                       "'deactivated':%(status)s and rule:context_is_admin "
                       "or not 'deactivated':%(status)s",
                       "Policy to download blob from deactivated artifact"),
    policy.RuleDefault("artifact:download",
                       "rule:admin_or_owner and "
                       "rule:artifact:download_deactivated",
                       "Policy to download blob from artifact"),
]
|
||||
|
||||
|
||||
def list_rules():
    """Return the default policy rules for the artifact API."""
    return artifact_policy_rules
|
||||
|
||||
|
||||
def init(use_conf=True):
    """Return the global policy Enforcer, creating it on first use.

    :param use_conf: whether the Enforcer should load rules from the
        configured policy file
    """
    global _ENFORCER
    if _ENFORCER is None:
        enforcer = policy.Enforcer(CONF, use_conf=use_conf)
        enforcer.register_defaults(list_rules())
        _ENFORCER = enforcer
    return _ENFORCER
|
||||
|
||||
|
||||
def reset():
    """Dispose of the global Enforcer so init() builds a fresh one."""
    global _ENFORCER
    if _ENFORCER is not None:
        _ENFORCER.clear()
        _ENFORCER = None
|
||||
|
||||
|
||||
def authorize(policy_name, target, context, do_raise=True):
    """Check that the user action is permitted by the loaded policies.

    :param policy_name: name of the policy rule to enforce
    :param target: attributes of the object the action applies to
    :param context: request context providing the user credentials
    :param do_raise: if True, raise PolicyException on failure instead of
        returning False
    :return: True if check passed
    """
    creds = context.to_policy_values()
    result = init().authorize(
        policy_name, target, creds, do_raise=do_raise,
        exc=exception.PolicyException, policy_name=policy_name)
    LOG.debug("Policy %(policy)s check %(result)s for request %(request_id)s",
              {'policy': policy_name,
               'result': 'passed' if result else 'failed',
               'request_id': context.request_id})
    return result
|
||||
|
||||
|
||||
def check_is_admin(context):
    """Return True when the context passes the 'context_is_admin' policy."""
    return authorize('context_is_admin', {}, context, do_raise=False)
|
||||
@@ -1,173 +0,0 @@
|
||||
# Copyright (c) 2015 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import operator
|
||||
|
||||
import semantic_version
|
||||
from sqlalchemy.orm.properties import CompositeProperty
|
||||
from sqlalchemy import sql
|
||||
|
||||
from glare.common import exception
|
||||
from glare.i18n import _
|
||||
|
||||
# Largest value storable in 16 bits; major/minor/patch each occupy 16 bits
# of the packed 64-bit version representation (see _version_to_long).
MAX_COMPONENT_LENGTH = pow(2, 16) - 1
# Numeric prerelease components are zero-padded to this width so that
# lexicographic ordering in the database matches numeric ordering.
MAX_NUMERIC_PRERELEASE_LENGTH = 6
|
||||
|
||||
|
||||
class DBVersion(object):
    """Semantic version stored as 3 composite database columns."""

    def __init__(self, components_long, prerelease, build):
        """Create a DBVersion object out of 3 component fields.

        This initializer is supposed to be called from SQLAlchemy if 3
        database columns are mapped to this composite field.

        :param components_long: a 64-bit long value, containing numeric
            components of the version
        :param prerelease: a prerelease label of the version, optionally
            preformatted with leading zeroes in numeric-only parts of the
            label
        :param build: a build label of the version
        """
        version_string = '%s.%s.%s' % _long_to_components(components_long)
        if prerelease:
            version_string += '-' + _strip_leading_zeroes_from_prerelease(
                prerelease)
        if build:
            version_string += '+' + build
        self.version = semantic_version.Version(version_string)

    def __repr__(self):
        return str(self.version)

    def __eq__(self, other):
        return (isinstance(other, DBVersion) and
                self.version == other.version)

    def __ne__(self, other):
        return (not isinstance(other, DBVersion) or
                self.version != other.version)

    def __composite_values__(self):
        # Pack back into the 3 column values SQLAlchemy stores.
        long_version = _version_to_long(self.version)
        prerelease = _add_leading_zeroes_to_prerelease(self.version.prerelease)
        build = '.'.join(self.version.build) if self.version.build else None
        return long_version, prerelease, build
|
||||
|
||||
|
||||
def parse(version_string):
    """Coerce a (possibly partial) version string into a DBVersion."""
    ver = semantic_version.Version.coerce(version_string)
    return DBVersion(_version_to_long(ver),
                     '.'.join(ver.prerelease),
                     '.'.join(ver.build))
|
||||
|
||||
|
||||
def _check_limit(value):
    """Raise InvalidVersion when a numeric component exceeds 16 bits."""
    if value <= MAX_COMPONENT_LENGTH:
        return
    message = _("Version component is too "
                "large (%d max)") % MAX_COMPONENT_LENGTH
    raise exception.InvalidVersion(message)
|
||||
|
||||
|
||||
def _version_to_long(version):
    """Convert the numeric part of a semver version into a 64-bit long.

    Layout of the packed value:

    * major version is stored in the first 16 bits of the value
    * minor version is stored in the next 16 bits
    * patch version is stored in the following 16 bits
    * the next 2 bits store a flag: 00 when the version has a pre-release
      label, 10 (decimal 2) otherwise, so releases sort after their
      pre-releases; the remaining flag values are reserved for future usage
    * the last 14 bits of the value are reserved for future usage

    The numeric components of version are checked so their value does not
    exceed 16 bits.

    :param version: a semantic_version.Version object
    """
    for component in (version.major, version.minor, version.patch):
        _check_limit(component)
    flag = (0 if version.prerelease else 2) << 14
    return ((version.major << 48) |
            (version.minor << 32) |
            (version.patch << 16) |
            flag)
|
||||
|
||||
|
||||
def _long_to_components(value):
|
||||
major = value >> 48
|
||||
minor = (value - (major << 48)) >> 32
|
||||
patch = (value - (major << 48) - (minor << 32)) >> 16
|
||||
return str(major), str(minor), str(patch)
|
||||
|
||||
|
||||
def _add_leading_zeroes_to_prerelease(label_tuple):
    """Zero-pad numeric prerelease components for correct DB ordering.

    Returns None when there is no prerelease label.
    """
    if label_tuple is None:
        return None
    padded = []
    for part in label_tuple:
        if not part.isdigit():
            padded.append(part)
            continue
        if len(part) > MAX_NUMERIC_PRERELEASE_LENGTH:
            message = _("Prerelease numeric component is too large "
                        "(%d characters "
                        "max)") % MAX_NUMERIC_PRERELEASE_LENGTH
            raise exception.InvalidVersion(message)
        padded.append(part.rjust(MAX_NUMERIC_PRERELEASE_LENGTH, '0'))
    return '.'.join(padded)
|
||||
|
||||
|
||||
def _strip_leading_zeroes_from_prerelease(string_value):
|
||||
res = []
|
||||
for component in string_value.split('.'):
|
||||
if component.isdigit():
|
||||
val = component.lstrip('0')
|
||||
if len(val) == 0: # Corner case: when the component is just '0'
|
||||
val = '0' # it will be stripped completely, so restore it
|
||||
res.append(val)
|
||||
else:
|
||||
res.append(component)
|
||||
return '.'.join(res)
|
||||
|
||||
# Maps an inclusive comparison to its strict counterpart; used so the most
# significant version column is compared strictly before tie-breaking on
# the prerelease column (see VersionComparator._get_comparison).
strict_op_map = {
    operator.ge: operator.gt,
    operator.le: operator.lt
}
|
||||
|
||||
|
||||
class VersionComparator(CompositeProperty.Comparator):
    """SQL-level comparison support for the composite version columns."""

    def _get_comparison(self, values, op):
        columns = self.__clause_element__().clauses
        # Compare the packed numeric column strictly first; fall back to the
        # prerelease column only when the numeric columns tie.
        stricter_op = strict_op_map.get(op, op)
        return sql.or_(stricter_op(columns[0], values[0]),
                       sql.and_(columns[0] == values[0],
                                op(columns[1], values[1])))

    def __gt__(self, other):
        return self._get_comparison(other.__composite_values__(), operator.gt)

    def __ge__(self, other):
        return self._get_comparison(other.__composite_values__(), operator.ge)

    def __lt__(self, other):
        return self._get_comparison(other.__composite_values__(), operator.lt)

    def __le__(self, other):
        return self._get_comparison(other.__composite_values__(), operator.le)
|
||||
@@ -1,114 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from glance_store import backend
|
||||
from glance_store import exceptions as store_exc
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
|
||||
from glare.common import exception
|
||||
from glare.common import utils
|
||||
from glare.store import database
|
||||
|
||||
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
# Store driver for blobs persisted directly in the database (sql:// URIs).
database_api = database.DatabaseStoreAPI()
|
||||
|
||||
# Translation table consumed by utils.error_handler: each glance_store
# exception listed under 'catch' is re-raised as the Glare exception under
# 'raise'. Ordered most-specific first; GlanceStoreException is the
# catch-all base.
error_map = [{'catch': store_exc.NotFound,
              'raise': exception.NotFound},
             {'catch': store_exc.UnknownScheme,
              'raise': exception.BadRequest},
             {'catch': store_exc.BadStoreUri,
              'raise': exception.BadRequest},
             {'catch': store_exc.Duplicate,
              'raise': exception.Conflict},
             {'catch': store_exc.StorageFull,
              'raise': exception.Forbidden},
             {'catch': store_exc.StorageWriteDenied,
              'raise': exception.Forbidden},
             {'catch': store_exc.Forbidden,
              'raise': exception.Forbidden},
             {'catch': store_exc.Invalid,
              'raise': exception.BadRequest},
             {'catch': store_exc.BadStoreConfiguration,
              'raise': exception.GlareException},
             {'catch': store_exc.RemoteServiceUnavailable,
              'raise': exception.BadRequest},
             {'catch': store_exc.HasSnapshot,
              'raise': exception.Conflict},
             {'catch': store_exc.InUseByStore,
              'raise': exception.Conflict},
             {'catch': store_exc.BackendException,
              'raise': exception.GlareException},
             {'catch': store_exc.GlanceStoreException,
              'raise': exception.GlareException}]
|
||||
|
||||
|
||||
@utils.error_handler(error_map)
def save_blob_to_store(blob_id, blob, context, max_size,
                       store_type=None, verifier=None):
    """Save file to specified store type and return location info to the user.

    :param blob_id: id of blob
    :param blob: blob file iterator
    :param context: user context
    :param max_size: maximum number of bytes the blob may contain
    :param store_type: type of the store, None means save to default store
    :param verifier: signature verifier object
    :return: tuple of values: (location_uri, size, checksums)
    """
    known_stores = set(CONF.glance_store.stores + ['database'])
    if store_type not in known_stores:
        LOG.warning("Incorrect backend configuration - scheme '%s' is not"
                    " supported. Fallback to default store.", store_type)
        store_type = None
    # LimitingReader enforces max_size and accumulates checksums while the
    # backend consumes the data.
    data = utils.LimitingReader(utils.CooperativeReader(blob), max_size)

    LOG.debug('Start uploading blob %s.', blob_id)
    if store_type == 'database':
        location = database_api.add_to_backend(
            blob_id, data, context, verifier)
    else:
        location = backend.add_to_backend(
            CONF, blob_id, data, 0, store_type, context, verifier)[0]
    LOG.debug('Uploading of blob %s is finished.', blob_id)

    checksums = {"md5": data.md5.hexdigest(),
                 "sha1": data.sha1.hexdigest(),
                 "sha256": data.sha256.hexdigest()}
    return location, data.bytes_read, checksums
|
||||
|
||||
|
||||
@utils.error_handler(error_map)
def load_from_store(uri, context):
    """Load file from store backend.

    :param uri: blob uri
    :param context: user context
    :return: file iterator
    """
    if uri.startswith("sql://"):
        # Blobs stored in the database are wrapped into a file-like iterator.
        data = database_api.get_from_store(uri, context)
        return utils.BlobIterator(data)
    return backend.get_from_backend(uri=uri, context=context)[0]
|
||||
|
||||
|
||||
@utils.error_handler(error_map)
def delete_blob(uri, context):
    """Delete blob from backend store.

    :param uri: blob uri
    :param context: user context
    """
    if uri.startswith("sql://"):
        return database_api.delete_from_store(uri, context)
    return backend.delete_from_backend(uri, context)
|
||||
@@ -1,582 +0,0 @@
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# Copyright 2014 SoftLayer Technologies, Inc.
|
||||
# Copyright 2015 Mirantis, Inc
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
System-level utilities and helper functions.
|
||||
"""
|
||||
try:
|
||||
from eventlet import sleep
|
||||
except ImportError:
|
||||
from time import sleep
|
||||
from eventlet.green import socket
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
|
||||
from OpenSSL import crypto
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import encodeutils
|
||||
from oslo_utils import excutils
|
||||
from oslo_utils import timeutils
|
||||
from oslo_utils import uuidutils
|
||||
from oslo_versionedobjects import fields
|
||||
import six
|
||||
|
||||
from glare.common import exception
|
||||
from glare.i18n import _
|
||||
from glare.objects.meta import fields as glare_fields
|
||||
|
||||
CONF = cfg.CONF

LOG = logging.getLogger(__name__)

# Environment variable through which the functional test runner passes an
# already-bound listening socket fd (see get_test_suite_socket).
GLARE_TEST_SOCKET_FD_STR = 'GLARE_TEST_SOCKET_FD'
|
||||
|
||||
|
||||
def cooperative_iter(iter):
    """Yield chunks from *iter*, scheduling after each iteration.

    The zero-length sleep yields control to other eventlet threads and can
    prevent thread starvation while streaming large blobs.

    :param iter: an iterator to wrap
    """
    try:
        for chunk in iter:
            sleep(0)
            yield chunk
    except Exception as err:
        # Log, then re-raise with the original traceback preserved.
        with excutils.save_and_reraise_exception():
            LOG.error("Error: cooperative_iter exception %s", err)
|
||||
|
||||
|
||||
def cooperative_read(fd):
    """Wrap *fd*'s read so every call yields to other eventlet threads.

    :param fd: a file descriptor to wrap
    """
    def readfn(*args):
        chunk = fd.read(*args)
        sleep(0)
        return chunk
    return readfn
|
||||
|
||||
|
||||
MAX_COOP_READER_BUFFER_SIZE = 134217728  # 128M seems like a sane buffer limit


class CooperativeReader(object):
    """An eventlet thread friendly class for reading in blob data.

    When accessing data either through the iterator or the read method
    we perform a sleep to allow a co-operative yield. When there is more than
    one blob being uploaded/downloaded this prevents eventlet thread
    starvation, ie allows all threads to be scheduled periodically rather than
    having the same thread be continuously active.
    """
    def __init__(self, fd):
        """:param fd: Underlying blob file object
        """
        self.fd = fd
        self.iterator = None
        # NOTE(markwash): if the underlying supports read(), overwrite the
        # default iterator-based implementation with cooperative_read which
        # is more straightforward
        if hasattr(fd, 'read'):
            self.read = cooperative_read(fd)
        else:
            self.buffer = b''
            self.position = 0

    def read(self, length=None):
        """Return the requested amount of bytes, fetching the next chunk of
        the underlying iterator when needed.

        This is replaced with cooperative_read in __init__ if the underlying
        fd already supports read().
        """
        if length is None:
            if len(self.buffer) - self.position > 0:
                # if no length specified but some data exists in buffer,
                # return that data and clear the buffer
                result = self.buffer[self.position:]
                self.buffer = b''
                self.position = 0
                # BUG FIX: the original returned str(result); on Python 3
                # that produces the repr ("b'...'") of the bytes instead of
                # the buffered data itself.
                return bytes(result)
            else:
                # otherwise read the next chunk from the underlying iterator
                # and return it as a whole. Reset the buffer, as subsequent
                # calls may specify the length
                try:
                    if self.iterator is None:
                        self.iterator = self.__iter__()
                    return next(self.iterator)
                except StopIteration:
                    # BUG FIX: return bytes on exhaustion for consistency
                    # with the rest of this (binary) reader; the original
                    # returned the text string ''.
                    return b''
                finally:
                    self.buffer = b''
                    self.position = 0
        else:
            result = bytearray()
            while len(result) < length:
                if self.position < len(self.buffer):
                    to_read = length - len(result)
                    chunk = self.buffer[self.position:self.position + to_read]
                    result.extend(chunk)

                    # This check is here to prevent potential OOM issues if
                    # this code is called with unreasonably high values of
                    # read size. Currently it is only called from the HTTP
                    # clients of Glare backend stores, which use httplib for
                    # data streaming, which has readsize hardcoded to 8K, so
                    # this check should never fire. Regardless it still
                    # worths to make the check, as the code may be reused
                    # somewhere else.
                    if len(result) >= MAX_COOP_READER_BUFFER_SIZE:
                        raise exception.RequestEntityTooLarge()
                    self.position += len(chunk)
                else:
                    try:
                        if self.iterator is None:
                            self.iterator = self.__iter__()
                        self.buffer = next(self.iterator)
                        self.position = 0
                    except StopIteration:
                        self.buffer = b''
                        self.position = 0
                        # Underlying iterator exhausted: return the partial
                        # result gathered so far.
                        return bytes(result)
            return bytes(result)

    def __iter__(self):
        return cooperative_iter(self.fd.__iter__())
|
||||
|
||||
|
||||
class LimitingReader(object):
    """Reader that fails once more than *limit* bytes have been consumed.

    Running md5/sha1/sha256 digests are maintained for data consumed via
    read(); iteration only enforces the size limit.
    """
    def __init__(self, data, limit):
        """
        :param data: Underlying blob data object
        :param limit: maximum number of bytes the reader should allow
        """
        self.data = data
        self.limit = limit
        self.bytes_read = 0
        self.md5 = hashlib.md5()
        self.sha1 = hashlib.sha1()
        self.sha256 = hashlib.sha256()

    def __iter__(self):
        for chunk in self.data:
            self.bytes_read += len(chunk)
            if self.bytes_read > self.limit:
                raise exception.RequestEntityTooLarge()
            yield chunk

    def read(self, length=None):
        if length is None:
            chunk = self.data.read()
        else:
            chunk = self.data.read(length)
        self.bytes_read += len(chunk)
        if chunk:
            self.md5.update(chunk)
            self.sha1.update(chunk)
            self.sha256.update(chunk)
        if self.bytes_read > self.limit:
            message = _("The server is refusing to process a request because"
                        " the request entity is larger than the server is"
                        " willing or able to process - %s bytes.") % self.limit
            raise exception.RequestEntityTooLarge(message=message)
        return chunk
|
||||
|
||||
|
||||
def validate_key_cert(key_file, cert_file):
    """Check that an SSL key/cert PEM pair is loadable and belongs together.

    Loads both files, then signs and verifies a random payload with them.

    :param key_file: path to the PEM private key file
    :param cert_file: path to the PEM certificate file
    :raises RuntimeError: if either file cannot be read or parsed, or the
        key does not match the certificate
    """
    try:
        # Track which file is being processed so error messages can name it.
        error_key_name = "private key"
        error_filename = key_file
        with open(key_file, 'r') as keyfile:
            key_str = keyfile.read()
        key = crypto.load_privatekey(crypto.FILETYPE_PEM, key_str)

        error_key_name = "certificate"
        error_filename = cert_file
        with open(cert_file, 'r') as certfile:
            cert_str = certfile.read()
        cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_str)
    except IOError as ioe:
        raise RuntimeError(_("There is a problem with your %(error_key_name)s "
                             "%(error_filename)s. Please verify it."
                             " Error: %(ioe)s") %
                           {'error_key_name': error_key_name,
                            'error_filename': error_filename,
                            'ioe': ioe})
    except crypto.Error as ce:
        raise RuntimeError(_("There is a problem with your %(error_key_name)s "
                             "%(error_filename)s. Please verify it. OpenSSL"
                             " error: %(ce)s") %
                           {'error_key_name': error_key_name,
                            'error_filename': error_filename,
                            'ce': ce})

    try:
        # Sign/verify a throwaway payload to prove key and cert match.
        data = uuidutils.generate_uuid()
        # On Python 3, explicitly encode to UTF-8 to call crypto.sign() which
        # requires bytes. Otherwise, it raises a deprecation warning (and
        # will raise an error later).
        data = encodeutils.to_utf8(data)
        digest = CONF.digest_algorithm
        if digest == 'sha1':
            LOG.warning(
                'The FIPS (FEDERAL INFORMATION PROCESSING STANDARDS)'
                ' state that the SHA-1 is not suitable for'
                ' general-purpose digital signature applications (as'
                ' specified in FIPS 186-3) that require 112 bits of'
                ' security. The default value is sha1 in Kilo for a'
                ' smooth upgrade process, and it will be updated'
                ' with sha256 in next release(L).')
        out = crypto.sign(key, data, digest)
        crypto.verify(cert, out, data, digest)
    except crypto.Error as ce:
        raise RuntimeError(_("There is a problem with your key pair. "
                             "Please verify that cert %(cert_file)s and "
                             "key %(key_file)s belong together. OpenSSL "
                             "error %(ce)s") % {'cert_file': cert_file,
                                                'key_file': key_file,
                                                'ce': ce})
|
||||
|
||||
|
||||
def get_test_suite_socket():
    """Return a listening socket inherited from the test suite, if any.

    When the test suite exports an inherited file descriptor through the
    environment variable named by ``GLARE_TEST_SOCKET_FD_STR``, rebuild a
    TCP socket from that descriptor, start listening on it, and clean up
    both the environment variable and the duplicated raw fd.

    :returns: a listening socket, or None when no descriptor was handed down
    """
    global GLARE_TEST_SOCKET_FD_STR
    fd_str = os.environ.get(GLARE_TEST_SOCKET_FD_STR)
    if fd_str is None:
        return None
    inherited_fd = int(fd_str)
    listener = socket.fromfd(inherited_fd, socket.AF_INET, socket.SOCK_STREAM)
    if six.PY2:
        # On Python 2 the object from fromfd() is the low-level _socket
        # object; wrap it in the higher-level SocketType.
        listener = socket.SocketType(_sock=listener)
    listener.listen(CONF.backlog)
    # fromfd() duplicated the descriptor, so drop both our references.
    del os.environ[GLARE_TEST_SOCKET_FD_STR]
    os.close(inherited_fd)
    return listener
|
||||
|
||||
|
||||
# Pattern matching any character outside the Basic Multilingual Plane
# (code points U+10000..U+10FFFF, i.e. 4 bytes in UTF-8).  On "narrow"
# (UCS-2) Python builds the wide code-point range is rejected by re, so
# fall back to matching the equivalent UTF-16 surrogate pairs.
try:
    REGEX_4BYTE_UNICODE = re.compile(u'[\U00010000-\U0010ffff]')
except re.error:
    # UCS-2 build case
    REGEX_4BYTE_UNICODE = re.compile(u'[\uD800-\uDBFF][\uDC00-\uDFFF]')
|
||||
|
||||
|
||||
def no_4byte_params(f):
    """Decorator checking that no 4 byte unicode characters are allowed
    in dicts' keys/values and string's parameters.

    Dict arguments are checked recursively; plain positional arguments
    and all keyword arguments are checked as well.  A violation raises
    ``exception.BadRequest``.
    """
    # Local import keeps this self-contained; functools is stdlib.
    import functools

    # functools.wraps preserves the wrapped function's metadata
    # (__name__, __doc__), which the original decorator discarded.
    @functools.wraps(f)
    def wrapper(*args, **kwargs):

        def _is_match(some_str):
            # True only for text strings containing at least one
            # character outside the Basic Multilingual Plane.
            return (isinstance(some_str, six.text_type) and
                    REGEX_4BYTE_UNICODE.findall(some_str) != [])

        def _check_dict(data_dict):
            # a dict of dicts has to be checked recursively
            for key, value in data_dict.items():
                if isinstance(value, dict):
                    _check_dict(value)
                else:
                    if _is_match(key):
                        msg = _("Property names can't contain 4 byte unicode.")
                        raise exception.BadRequest(msg)
                    if _is_match(value):
                        msg = (_("%s can't contain 4 byte unicode characters.")
                               % key.title())
                        raise exception.BadRequest(msg)

        for data_dict in [arg for arg in args if isinstance(arg, dict)]:
            _check_dict(data_dict)
        # now check args for str values
        for arg in args:
            if _is_match(arg):
                msg = _("Param values can't contain 4 byte unicode.")
                raise exception.BadRequest(msg)
        # check kwargs as well, as params are passed as kwargs via
        # registry calls
        _check_dict(kwargs)
        return f(*args, **kwargs)
    return wrapper
|
||||
|
||||
|
||||
def stash_conf_values():
    """Make a copy of some of the current global CONF's settings.

    Allows determining if any of these values have changed
    when the config is reloaded.

    :returns: dict mapping option names to their current values
    """
    return {
        'bind_host': CONF.bind_host,
        'bind_port': CONF.bind_port,
        # NOTE: this previously stashed CONF.cert_file under the
        # 'tcp_keepidle' key, so tcp_keepidle changes were never
        # detected on config reload.
        'tcp_keepidle': CONF.tcp_keepidle,
        'backlog': CONF.backlog,
        'key_file': CONF.key_file,
        'cert_file': CONF.cert_file,
        'enabled_artifact_types': CONF.enabled_artifact_types,
        'custom_artifact_types_modules': CONF.custom_artifact_types_modules,
    }
|
||||
|
||||
|
||||
def split_filter_op(expression):
    """Split operator from threshold in an expression.

    Designed for use on a comparative-filtering query field.
    When no operator is found, default to an equality comparison.

    :param expression: the expression to parse
    :return: a tuple (operator, threshold) parsed from expression
    """
    # NOTE stevelle decoding escaped values may be needed later
    prefix, colon, suffix = expression.partition(':')
    if not colon:
        # No separator present: the whole expression is the threshold.
        return 'eq', prefix
    try:
        # An ISO 8601 timestamp such as CCYY-MM-DDThh:mm:ss+hh:mm
        # contains colons but carries no operator; it must stay whole
        # and default to an equality comparison.
        timeutils.parse_isotime(expression)
    except ValueError:
        return prefix, suffix
    return 'eq', expression
|
||||
|
||||
|
||||
def validate_quotes(value):
    """Validate filter values

    Validation opening/closing quotes in the expression.
    """
    # True while the next unescaped quote would open a quoted section.
    expecting_opening = True
    for idx, char in enumerate(value):
        if char != '"':
            continue
        if idx and value[idx - 1] == '\\':
            # Escaped quote: not a delimiter.
            continue
        if expecting_opening:
            # An opening quote must start the string or follow a comma.
            if idx and value[idx - 1] != ',':
                msg = _("Invalid filter value %s. There is no comma "
                        "before opening quotation mark.") % value
                raise exception.InvalidParameterValue(message=msg)
        else:
            # A closing quote must end the string or precede a comma.
            if idx + 1 != len(value) and value[idx + 1] != ",":
                msg = _("Invalid filter value %s. There is no comma "
                        "after closing quotation mark.") % value
                raise exception.InvalidParameterValue(message=msg)
        expecting_opening = not expecting_opening
    if not expecting_opening:
        msg = _("Invalid filter value %s. The quote is not closed.") % value
        raise exception.InvalidParameterValue(message=msg)
|
||||
|
||||
|
||||
def split_filter_value_for_quotes(value):
    """Split filter values

    Split values by commas and quotes for 'in' operator, according api-wg.

    :param value: comma-separated expression; individual values may be
        double-quoted (with backslash escapes inside quotes)
    :return: list of the individual values; a bare comma yields ''
    :raises exception.InvalidParameterValue: via validate_quotes() when
        quoting in ``value`` is unbalanced or misplaced
    """
    validate_quotes(value)
    # Three alternatives, tried in order per match:
    #  1) a quoted value (group 1, quotes stripped, escapes kept)
    #  2) a bare unquoted value (group 2)
    #  3) a lone comma, producing an empty string for both groups
    tmp = re.compile(r'''
        "(        # if found a double-quote
        [^\"\\]*  # take characters either non-quotes or backslashes
        (?:\\.    # take backslashes and character after it
        [^\"\\]*)*  # take characters either non-quotes or backslashes
        )         # before double-quote
        ",?       # a double-quote with comma maybe
        | ([^,]+),?  # if not found double-quote take any non-comma
                     # characters with comma maybe
        | ,       # if we have only comma take empty string
        ''', re.VERBOSE)
    return [val[0] or val[1] for val in re.findall(tmp, value)]
|
||||
|
||||
|
||||
class error_handler(object):
    """Decorator factory translating caught exceptions to other types."""

    def __init__(self, error_map, default_exception=None):
        """Init method of the class.

        :param error_map: sequence of mapping records.  Each record is a
            dict with keys "catch" (exception type to intercept) and
            "raise" (exception type to re-raise in its place).
            For example, if sqlalchemy NotFound might be raised and we need
            re-raise it as glare NotFound exception then error_map must
            contain {"catch": SQLAlchemyNotFound,
                     "raise": exceptions.NotFound}
        :param default_exception: default exception that must be raised if
            exception that cannot be found in error map was raised
        """
        self.error_map = error_map
        self.default_exception = default_exception

    def __call__(self, f):
        """Decorator that catches exception that came from func or method.

        :param f: target func
        :return: wrapped version of ``f`` with translated exceptions
        """
        # Local import keeps the block self-contained; functools is stdlib.
        import functools

        # functools.wraps preserves f's metadata (__name__, __doc__),
        # which the original wrapper discarded.
        @functools.wraps(f)
        def new_function(*args, **kwargs):
            try:
                return f(*args, **kwargs)
            except Exception as e:
                # Re-raise as the first matching record's type; the
                # for/else falls through to the default when no record
                # matches.
                for map_record in self.error_map:
                    if isinstance(e, map_record['catch']):
                        raise map_record['raise'](str(e))
                else:
                    if self.default_exception:
                        raise self.default_exception(str(e))
                    else:
                        raise
        return new_function
|
||||
|
||||
|
||||
def get_schema_type(attr):
    """Map an artifact field (instance or field class) to its JSON
    schema type name.

    Unrecognized field kinds default to 'string'.
    """
    if attr is fields.Integer or isinstance(attr, fields.IntegerField):
        return 'integer'
    if attr is fields.Float or isinstance(attr, fields.FloatField):
        return 'number'
    if (attr is fields.FlexibleBoolean or
            isinstance(attr, fields.FlexibleBooleanField)):
        return 'boolean'
    if isinstance(attr, glare_fields.List):
        return 'array'
    if isinstance(attr, (glare_fields.Dict, glare_fields.BlobField)):
        return 'object'
    return 'string'
|
||||
|
||||
|
||||
def get_glare_type(attr):
    """Map an artifact field instance to its Glare type name.

    Collections report their element type with a 'List'/'Dict' suffix;
    unrecognized field kinds default to 'String'.
    """
    # Checks run in the original priority order; the first matching
    # class wins.
    dispatch = (
        (fields.IntegerField, lambda a: 'Integer'),
        (fields.FloatField, lambda a: 'Float'),
        (fields.FlexibleBooleanField, lambda a: 'Boolean'),
        (fields.DateTimeField, lambda a: 'DateTime'),
        (glare_fields.BlobField, lambda a: 'Blob'),
        (glare_fields.Link, lambda a: 'Link'),
        (glare_fields.List,
         lambda a: _get_element_type(a.element_type) + 'List'),
        (glare_fields.Dict,
         lambda a: _get_element_type(a.element_type) + 'Dict'),
    )
    for field_cls, type_name in dispatch:
        if isinstance(attr, field_cls):
            return type_name(attr)
    return 'String'
|
||||
|
||||
|
||||
def _get_element_type(element_type):
    """Return the Glare type name for a collection's element type.

    Comparison is by identity, matching how element types are declared;
    anything unrecognized is reported as 'String'.
    """
    known_types = (
        (fields.FlexibleBooleanField, 'Boolean'),
        (fields.Integer, 'Integer'),
        (fields.Float, 'Float'),
        (glare_fields.BlobFieldType, 'Blob'),
        (glare_fields.LinkFieldType, 'Link'),
    )
    for type_obj, type_name in known_types:
        if element_type is type_obj:
            return type_name
    return 'String'
|
||||
|
||||
|
||||
class BlobIterator(object):
    """Reads data from a blob, one chunk at a time.

    :param data: sliceable payload (e.g. bytes) to iterate over
    :param chunk_size: maximum length of each yielded chunk
    """

    def __init__(self, data, chunk_size=65536):
        self.chunk_size = chunk_size
        self.data = data

    def __iter__(self):
        # NOTE: the original never advanced the chunk index (yielding
        # the first chunk repeatedly) and ended with `raise
        # StopIteration()`, which PEP 479 turns into a RuntimeError on
        # Python 3.7+.  Advance the index and simply fall off the end.
        bytes_left = len(self.data)
        i = 0
        while bytes_left > 0:
            chunk = self.data[i * self.chunk_size:(i + 1) * self.chunk_size]
            bytes_left -= len(chunk)
            yield chunk
            i += 1
|
||||
|
||||
|
||||
def validate_status_transition(af, from_status, to_status):
    """Validate that an artifact status change is allowed.

    :param af: artifact whose fields are inspected on activation
    :param from_status: current artifact status
    :param to_status: requested new status
    :raises exception.Forbidden: when the transition is not permitted
    :raises exception.BadRequest: when to_status is unknown
    """
    if from_status == 'deleted':
        msg = _("Cannot change status if artifact is deleted.")
        raise exception.Forbidden(msg)
    if to_status == 'active':
        if from_status == 'drafted':
            # Every required-on-activate field must be populated first.
            for name, type_obj in af.fields.items():
                if type_obj.required_on_activate and getattr(af, name) is None:
                    msg = _("'%s' field value must be set before "
                            "activation.") % name
                    raise exception.Forbidden(msg)
    elif to_status == 'drafted':
        if from_status != 'drafted':
            # NOTE: the original message had no format placeholder, so
            # the '%' interpolation raised TypeError instead of the
            # intended Forbidden.
            msg = _("Cannot change status to 'drafted' from %s") % from_status
            raise exception.Forbidden(msg)
    elif to_status == 'deactivated':
        if from_status not in ('active', 'deactivated'):
            msg = _("Cannot deactivate artifact if it's not active.")
            raise exception.Forbidden(msg)
    elif to_status == 'deleted':
        msg = _("Cannot delete artifact with PATCH requests. Use special "
                "API to do this.")
        raise exception.Forbidden(msg)
    else:
        msg = _("Unknown artifact status: %s.") % to_status
        raise exception.BadRequest(msg)
|
||||
|
||||
|
||||
def validate_visibility_transition(af, from_visibility, to_visibility):
    """Validate that an artifact visibility change is allowed.

    :param af: artifact; must be active to become public
    :param from_visibility: current visibility
    :param to_visibility: requested visibility
    :raises exception.Forbidden: when the transition is not permitted
    :raises exception.BadRequest: when to_visibility is unknown
    """
    if to_visibility == 'private':
        if from_visibility != 'private':
            msg = _("Cannot make artifact private again.")
            # NOTE: the original built this message but raised
            # Forbidden() without it, losing the explanation.
            raise exception.Forbidden(msg)
    elif to_visibility == 'public':
        if af.status != 'active':
            msg = _("Cannot change visibility to 'public' if artifact"
                    " is not active.")
            raise exception.Forbidden(msg)
    else:
        msg = _("Unknown artifact visibility: %s.") % to_visibility
        raise exception.BadRequest(msg)
|
||||
|
||||
|
||||
def validate_change_allowed(af, field_name):
    """Validate if fields can be set for the artifact.

    :param af: artifact being modified
    :param field_name: name of the field the caller wants to change
    :raises exception.BadRequest: when the field does not exist
    :raises exception.Forbidden: when the field may not be changed in
        the artifact's current state
    """
    if field_name not in af.fields:
        msg = _("Cannot add new field '%s' to artifact.") % field_name
        raise exception.BadRequest(msg)
    if af.status not in ('active', 'drafted'):
        msg = _("Forbidden to change fields "
                "if artifact is not active or drafted.")
        raise exception.Forbidden(message=msg)
    field = af.fields[field_name]
    if field.system is True:
        msg = _("Forbidden to specify system field %s. It is not "
                "available for modifying by users.") % field_name
        raise exception.Forbidden(msg)
    if af.status == 'active' and not field.mutable:
        msg = (_("Forbidden to change field '%s' after activation.")
               % field_name)
        raise exception.Forbidden(message=msg)
|
||||
@@ -1,834 +0,0 @@
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# Copyright 2010 OpenStack Foundation
|
||||
# Copyright 2014 IBM Corp.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Utility methods for working with WSGI servers
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import errno
|
||||
import functools
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
import time
|
||||
|
||||
import eventlet
|
||||
from eventlet.green import socket
|
||||
from eventlet.green import ssl
|
||||
import eventlet.greenio
|
||||
import eventlet.wsgi
|
||||
import glance_store
|
||||
from oslo_concurrency import processutils
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import encodeutils
|
||||
from oslo_utils import strutils
|
||||
from osprofiler import opts as profiler_opts
|
||||
import routes
|
||||
import routes.middleware
|
||||
import six
|
||||
import webob.dec
|
||||
import webob.exc
|
||||
from webob import multidict
|
||||
|
||||
from glare.common import exception as glare_exc
|
||||
from glare.common import utils
|
||||
from glare import i18n
|
||||
from glare.i18n import _
|
||||
|
||||
|
||||
# Options controlling which address/port the WSGI server binds to.
bind_opts = [
    cfg.HostAddressOpt('bind_host', default='0.0.0.0',
                       help=_('Address to bind the server. Useful when '
                              'selecting a particular network interface.')),
    cfg.PortOpt('bind_port',
                help=_('The port on which the server will listen.')),
]

# Options for the listening socket itself: TCP tuning and TLS material.
socket_opts = [
    cfg.IntOpt('backlog', default=4096,
               help=_('The backlog value that will be used when creating the '
                      'TCP listener socket.')),
    cfg.IntOpt('tcp_keepidle', default=600,
               help=_('The value for the socket option TCP_KEEPIDLE. This is '
                      'the time in seconds that the connection must be idle '
                      'before TCP starts sending keepalive probes.')),
    cfg.StrOpt('ca_file', help=_('CA certificate file to use to verify '
                                 'connecting clients.')),
    cfg.StrOpt('cert_file', help=_('Certificate file to use when starting API '
                                   'server securely.')),
    cfg.StrOpt('key_file', help=_('Private key file to use when starting API '
                                  'server securely.')),
]

# Options governing the eventlet worker processes and WSGI behavior.
eventlet_opts = [
    cfg.IntOpt('workers', default=0, min=0,
               help=_('The number of child process workers that will be '
                      'created to service requests. The default will be '
                      'equal to the number of CPUs available.')),
    cfg.IntOpt('max_header_line', default=16384, min=0,
               help=_('Maximum line size of message headers to be accepted. '
                      'max_header_line may need to be increased when using '
                      'large tokens (typically those generated by the '
                      'Keystone v3 API with big service catalogs')),
    cfg.BoolOpt('http_keepalive', default=True,
                help=_('If False, server will return the header '
                       '"Connection: close", '
                       'If True, server will return "Connection: Keep-Alive" '
                       'in its responses. In order to close the client socket '
                       'connection explicitly after the response is sent and '
                       'read successfully by the client, you simply have to '
                       'set this option to False when you create a wsgi '
                       'server.')),
    cfg.IntOpt('client_socket_timeout', default=900, min=0,
               help=_('Timeout for client connections\' socket operations. '
                      'If an incoming connection is idle for this number of '
                      'seconds it will be closed. A value of \'0\' means '
                      'wait forever.')),
]

LOG = logging.getLogger(__name__)

# Register all server options on the global config object.
CONF = cfg.CONF
CONF.register_opts(bind_opts)
CONF.register_opts(socket_opts)
CONF.register_opts(eventlet_opts)
profiler_opts.set_defaults(CONF)

# Pools handed out by get_asynchronous_eventlet_pool(); they are waited
# on at server shutdown so in-flight async tasks can finish.
ASYNC_EVENTLET_THREAD_POOL_LIST = []
|
||||
|
||||
|
||||
def get_num_workers():
    """Return the configured number of workers."""
    configured = CONF.workers
    # A setting of 0 means "one worker per available CPU".
    return processutils.get_worker_count() if configured == 0 else configured
|
||||
|
||||
|
||||
def get_bind_addr(default_port=None):
    """Return the host and port to bind to."""
    port = CONF.bind_port
    if not port:
        # Fall back when the option is unset.
        port = default_port
    return (CONF.bind_host, port)
|
||||
|
||||
|
||||
def ssl_wrap_socket(sock):
    """Wrap an existing socket in SSL

    :param sock: non-SSL socket to wrap

    :returns: An SSL wrapped socket
    """
    # Fail fast on unusable or mismatched key/cert material.
    utils.validate_key_cert(CONF.key_file, CONF.cert_file)

    ssl_kwargs = dict(server_side=True,
                      certfile=CONF.cert_file,
                      keyfile=CONF.key_file,
                      cert_reqs=ssl.CERT_NONE)
    if CONF.ca_file:
        # With a CA configured, demand and verify client certificates.
        ssl_kwargs.update(ca_certs=CONF.ca_file,
                          cert_reqs=ssl.CERT_REQUIRED)

    return ssl.wrap_socket(sock, **ssl_kwargs)
|
||||
|
||||
|
||||
def get_socket(default_port):
    """Bind socket to bind ip:port in conf

    :param default_port: port to bind to if none is specified in conf

    :returns: a socket object as returned from socket.listen or
              ssl.wrap_socket if conf specifies cert_file
    :raises RuntimeError: on incomplete SSL config or bind failure
    """
    bind_addr = get_bind_addr(default_port)

    # TODO(jaypipes): eventlet's greened socket module does not actually
    # support IPv6 in getaddrinfo(). We need to get around this in the
    # future or monitor upstream for a fix.
    # Pick the first IPv4/IPv6 family the bind address resolves to.
    address_family = [
        addr[0] for addr in socket.getaddrinfo(bind_addr[0],
                                               bind_addr[1],
                                               socket.AF_UNSPEC,
                                               socket.SOCK_STREAM)
        if addr[0] in (socket.AF_INET, socket.AF_INET6)
    ][0]

    # SSL needs both halves of the key pair; only one configured is an
    # operator error, not a reason to silently run plaintext.
    use_ssl = CONF.key_file or CONF.cert_file
    if use_ssl and (not CONF.key_file or not CONF.cert_file):
        raise RuntimeError(_("When running server in SSL mode, you must "
                             "specify both a cert_file and key_file "
                             "option value in your configuration file"))

    # The test suite may hand down an already-bound socket; otherwise
    # retry binding for up to 30 seconds to ride out a previous
    # instance's socket still lingering in TIME_WAIT.
    sock = utils.get_test_suite_socket()
    retry_until = time.time() + 30

    while not sock and time.time() < retry_until:
        try:
            sock = eventlet.listen(bind_addr,
                                   backlog=CONF.backlog,
                                   family=address_family)
        except socket.error as err:
            if err.args[0] != errno.EADDRINUSE:
                raise
            eventlet.sleep(0.1)
    if not sock:
        raise RuntimeError(_("Could not bind to %(host)s:%(port)s after"
                             " trying for 30 seconds") %
                           {'host': bind_addr[0],
                            'port': bind_addr[1]})

    return sock
|
||||
|
||||
|
||||
def set_eventlet_hub():
    """Select an available eventlet hub, preferring 'poll'.

    Falls back to the 'selects' hub; if neither can be activated the
    server cannot run and WorkerCreationFailure is raised.
    """
    for hub_name in ('poll', 'selects'):
        try:
            eventlet.hubs.use_hub(hub_name)
            return
        except Exception:
            continue
    msg = _("eventlet 'poll' nor 'selects' hubs are available "
            "on this platform")
    raise glare_exc.WorkerCreationFailure(
        reason=msg)
|
||||
|
||||
|
||||
def initialize_glance_store():
    """Initialize glance store.

    Registers glance_store options on the global CONF, creates the
    configured stores, then verifies the default store.  The order is
    significant: options must exist before stores are created, and
    verification needs the stores in place.
    """
    glance_store.register_opts(CONF)
    glance_store.create_stores(CONF)
    glance_store.verify_default_store()
|
||||
|
||||
|
||||
def get_asynchronous_eventlet_pool(size=1000):
    """Return eventlet pool to caller.

    Also store pools created in global list, to wait on
    it after getting signal for graceful shutdown.

    :param size: eventlet pool size
    :returns: eventlet pool
    """
    global ASYNC_EVENTLET_THREAD_POOL_LIST

    new_pool = eventlet.GreenPool(size=size)
    # Remember the pool so shutdown can wait for its tasks to drain.
    ASYNC_EVENTLET_THREAD_POOL_LIST.append(new_pool)
    return new_pool
|
||||
|
||||
|
||||
class Server(object):
    """Server class to manage multiple WSGI sockets and applications.

    This class requires initialize_glance_store set to True if
    glance store needs to be initialized.
    """
    def __init__(self, threads=1000, initialize_glance_store=False):
        os.umask(0o27)  # ensure files are created with the correct privileges
        self._logger = logging.getLogger("eventlet.wsgi.server")
        self.threads = threads
        self.children = set()
        self.stale_children = set()
        self.running = True
        self.initialize_glance_store = initialize_glance_store
        self.pgid = os.getpid()
        try:
            # Run in our own process group so killpg() reaches all workers.
            os.setpgid(self.pgid, self.pgid)
        except OSError:
            # Could not create a process group; a pgid of 0 makes
            # os.killpg target the current process group instead.
            self.pgid = 0

    def hup(self, *args):
        """Reloads configuration files with zero down time
        """
        signal.signal(signal.SIGHUP, signal.SIG_IGN)
        raise glare_exc.SIGHUPInterrupt

    def kill_children(self, *args):
        """Kills the entire process group."""
        signal.signal(signal.SIGTERM, signal.SIG_IGN)
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        self.running = False
        os.killpg(self.pgid, signal.SIGTERM)

    def start(self, application, default_port):
        """Run a WSGI server with the given application.

        :param application: The application to be run in the WSGI server
        :param default_port: Port to bind to if none is specified in conf
        """
        self.application = application
        self.default_port = default_port
        self.configure()
        self.start_wsgi()

    def start_wsgi(self):
        """Spawn the configured number of worker processes (or run
        single-process when no worker count is available)."""
        workers = get_num_workers()
        if workers is None:
            # Useful for profiling, test, debug etc.
            self.pool = self.create_pool()
            self.pool.spawn_n(self._single_run, self.application, self.sock)
            return
        else:
            LOG.info("Starting %d workers", workers)
            signal.signal(signal.SIGTERM, self.kill_children)
            signal.signal(signal.SIGINT, self.kill_children)
            signal.signal(signal.SIGHUP, self.hup)
            while len(self.children) < workers:
                self.run_child()

    def create_pool(self):
        return get_asynchronous_eventlet_pool(size=self.threads)

    def _remove_children(self, pid):
        # Forget a terminated child, whichever generation it belonged to.
        if pid in self.children:
            self.children.remove(pid)
            LOG.info('Removed dead child %s', pid)
        elif pid in self.stale_children:
            self.stale_children.remove(pid)
            LOG.info('Removed stale child %s', pid)
        else:
            LOG.warning('Unrecognised child %s', pid)

    def _verify_and_respawn_children(self, pid, status):
        if len(self.stale_children) == 0:
            LOG.debug('No stale children')
        if os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0:
            # A non-zero exit suggests a config/startup problem the
            # respawned child would only repeat.
            LOG.error('Not respawning child %d, cannot '
                      'recover from termination', pid)
            if not self.children and not self.stale_children:
                LOG.info('All workers have terminated. Exiting')
                self.running = False
        else:
            if len(self.children) < get_num_workers():
                self.run_child()

    def wait_on_children(self):
        """Reap children until the server stops running."""
        while self.running:
            try:
                pid, status = os.wait()
                if os.WIFEXITED(status) or os.WIFSIGNALED(status):
                    self._remove_children(pid)
                    self._verify_and_respawn_children(pid, status)
            except OSError as err:
                if err.errno not in (errno.EINTR, errno.ECHILD):
                    raise
            except KeyboardInterrupt:
                LOG.info('Caught keyboard interrupt. Exiting.')
                break
            except glare_exc.SIGHUPInterrupt:
                self.reload()
                continue
        eventlet.greenio.shutdown_safe(self.sock)
        self.sock.close()
        LOG.debug('Exited')

    def configure(self, old_conf=None, has_changed=None):
        """Apply configuration settings

        :param old_conf: Cached old configuration settings (if any)
        :param has_changed: callable to determine if a parameter has changed
        """
        eventlet.wsgi.MAX_HEADER_LINE = CONF.max_header_line
        self.client_socket_timeout = CONF.client_socket_timeout or None

        # determine if we need to reload artifact type definitions
        if old_conf is not None and (
                has_changed('enabled_artifact_types') or
                has_changed('custom_artifact_types_modules')):
            from glare import engine
            engine.Engine.registry.reset_registry()
            engine.Engine.registry.register_all_artifacts()

        self.configure_socket(old_conf, has_changed)
        if self.initialize_glance_store:
            initialize_glance_store()

    def reload(self):
        """Reload and re-apply configuration settings

        Existing child processes are sent a SIGHUP signal
        and will exit after completing existing requests.
        New child processes, which will have the updated
        configuration, are spawned. This allows preventing
        interruption to the service.
        """
        def _has_changed(old, new, param):
            old = old.get(param)
            new = getattr(new, param)
            return new != old

        old_conf = utils.stash_conf_values()
        has_changed = functools.partial(_has_changed, old_conf, CONF)
        CONF.reload_config_files()
        os.killpg(self.pgid, signal.SIGHUP)
        self.stale_children = self.children
        self.children = set()

        # Ensure any logging config changes are picked up
        logging.setup(CONF, 'glare')

        self.configure(old_conf, has_changed)
        self.start_wsgi()

    def wait(self):
        """Wait until all servers have completed running."""
        try:
            if self.children:
                self.wait_on_children()
            else:
                self.pool.waitall()
        except KeyboardInterrupt:
            pass

    def run_child(self):
        def child_hup(*args):
            """Shuts down child processes, existing requests are handled."""
            signal.signal(signal.SIGHUP, signal.SIG_IGN)
            eventlet.wsgi.is_accepting = False
            self.sock.close()

        pid = os.fork()
        if pid == 0:
            signal.signal(signal.SIGHUP, child_hup)
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
            # ignore the interrupt signal to avoid a race whereby
            # a child worker receives the signal before the parent
            # and is respawned unnecessarily as a result
            signal.signal(signal.SIGINT, signal.SIG_IGN)
            # The child has no need to stash the unwrapped
            # socket, and the reference prevents a clean
            # exit on sighup
            self._sock = None
            self.run_server()
            LOG.info('Child %d exiting normally', os.getpid())
            # self.pool.waitall() is now called in wsgi's server so
            # it's safe to exit here
            sys.exit(0)
        else:
            LOG.info('Started child %s', pid)
            self.children.add(pid)

    def run_server(self):
        """Run a WSGI server."""
        eventlet.wsgi.HttpProtocol.default_request_version = "HTTP/1.0"
        self.pool = self.create_pool()
        try:
            eventlet.wsgi.server(self.sock,
                                 self.application,
                                 log=self._logger,
                                 custom_pool=self.pool,
                                 debug=False,
                                 keepalive=CONF.http_keepalive,
                                 socket_timeout=self.client_socket_timeout)
        except socket.error as err:
            # NOTE: indexing the exception (err[0]) is a Python 2 idiom
            # that raises TypeError on Python 3; read args explicitly,
            # matching the style used in get_socket().
            if err.args[0] != errno.EINVAL:
                raise

        # waiting on async pools
        if ASYNC_EVENTLET_THREAD_POOL_LIST:
            for pool in ASYNC_EVENTLET_THREAD_POOL_LIST:
                pool.waitall()

    def _single_run(self, application, sock):
        """Start a WSGI server in a new green thread."""
        LOG.info("Starting single process server")
        eventlet.wsgi.server(sock, application, custom_pool=self.pool,
                             log=self._logger,
                             debug=False,
                             keepalive=CONF.http_keepalive,
                             socket_timeout=self.client_socket_timeout)

    def configure_socket(self, old_conf=None, has_changed=None):
        """Ensure a socket exists and is appropriately configured.

        This function is called on start up, and can also be
        called in the event of a configuration reload.

        When called for the first time a new socket is created.
        If reloading and either bind_host or bind_port have been
        changed the existing socket must be closed and a new
        socket opened (laws of physics).

        In all other cases (bind_host/bind_port have not changed)
        the existing socket is reused.

        :param old_conf: Cached old configuration settings (if any)
        :param has_changed: callable to determine if a parameter has changed
        """
        # Do we need a fresh socket?
        new_sock = (old_conf is None or (
            has_changed('bind_host') or
            has_changed('bind_port')))
        # Will we be using https?
        use_ssl = not (not CONF.cert_file or not CONF.key_file)
        # Were we using https before?
        old_use_ssl = (old_conf is not None and not (
            not old_conf.get('key_file') or
            not old_conf.get('cert_file')))
        # Do we now need to perform an SSL wrap on the socket?
        wrap_sock = use_ssl is True and (old_use_ssl is False or new_sock)
        # Do we now need to perform an SSL unwrap on the socket?
        unwrap_sock = use_ssl is False and old_use_ssl is True

        if new_sock:
            self._sock = None
            if old_conf is not None:
                self.sock.close()
            _sock = get_socket(self.default_port)
            _sock.setsockopt(socket.SOL_SOCKET,
                             socket.SO_REUSEADDR, 1)
            # sockets can hang around forever without keepalive
            _sock.setsockopt(socket.SOL_SOCKET,
                             socket.SO_KEEPALIVE, 1)
            self._sock = _sock

        if wrap_sock:
            self.sock = ssl_wrap_socket(self._sock)

        if unwrap_sock or new_sock and not use_ssl:
            self.sock = self._sock

        # Pick up newly deployed certs
        if old_conf is not None and use_ssl is True and old_use_ssl is True:
            if has_changed('cert_file') or has_changed('key_file'):
                utils.validate_key_cert(CONF.key_file, CONF.cert_file)
            if has_changed('cert_file'):
                self.sock.certfile = CONF.cert_file
            if has_changed('key_file'):
                self.sock.keyfile = CONF.key_file

        if new_sock or (old_conf is not None and has_changed('tcp_keepidle')):
            # This option isn't available in the OS X version of eventlet
            if hasattr(socket, 'TCP_KEEPIDLE'):
                self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE,
                                     CONF.tcp_keepidle)

        if old_conf is not None and has_changed('backlog'):
            self.sock.listen(CONF.backlog)
|
||||
|
||||
|
||||
class APIMapper(routes.Mapper):
    """Handle route matching when url is '' because routes.Mapper returns
    an error in this case.
    """

    def routematch(self, url=None, environ=None):
        # NOTE: the original used `url is ""`, which relies on CPython
        # string interning and triggers a SyntaxWarning on modern
        # Pythons; compare by equality.  A url of None must still fall
        # through to the base class, so `not url` would be wrong here.
        if url == "":
            result = self._match("", environ)
            return result[0], result[1]
        return routes.Mapper.routematch(self, url, environ)
|
||||
|
||||
|
||||
class RejectMethodController(object):
    """Controller answering every request with 405 Method Not Allowed."""

    def reject(self, req, allowed_methods, *args, **kwargs):
        method = req.environ['REQUEST_METHOD']
        LOG.debug("The method %s is not allowed for this resource", method)
        raise webob.exc.HTTPMethodNotAllowed(
            headers=[('Allow', allowed_methods)])
|
||||
|
||||
|
||||
class Router(object):
|
||||
"""WSGI middleware that maps incoming requests to WSGI apps.
|
||||
"""
|
||||
|
||||
def __init__(self, mapper):
|
||||
"""Create a router for the given routes.Mapper.
|
||||
|
||||
Each route in `mapper` must specify a 'controller', which is a
|
||||
WSGI app to call. You'll probably want to specify an 'action' as
|
||||
well and have your controller be a wsgi.Controller, who will route
|
||||
the request to the action method.
|
||||
|
||||
Examples:
|
||||
mapper = routes.Mapper()
|
||||
sc = ServerController()
|
||||
|
||||
# Explicit mapping of one route to a controller+action
|
||||
mapper.connect(None, "/svrlist", controller=sc, action="list")
|
||||
|
||||
# Actions are all implicitly defined
|
||||
mapper.resource("server", "servers", controller=sc)
|
||||
|
||||
# Pointing to an arbitrary WSGI app. You can specify the
|
||||
# {path_info:.*} parameter so the target app can be handed just that
|
||||
# section of the URL.
|
||||
mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp())
|
||||
"""
|
||||
mapper.redirect("", "/")
|
||||
self.map = mapper
|
||||
self._router = routes.middleware.RoutesMiddleware(self._dispatch,
|
||||
self.map)
|
||||
|
||||
@classmethod
|
||||
def factory(cls, global_conf, **local_conf):
|
||||
return cls(APIMapper())
|
||||
|
||||
@webob.dec.wsgify
|
||||
def __call__(self, req):
|
||||
"""Route the incoming request to a controller based on self.map.
|
||||
If no match, return either a 404(Not Found) or 501(Not Implemented).
|
||||
"""
|
||||
return self._router
|
||||
|
||||
@staticmethod
|
||||
@webob.dec.wsgify
|
||||
def _dispatch(req):
|
||||
"""Called by self._router after matching the incoming request to a route
|
||||
and putting the information into req.environ. Either returns 404,
|
||||
501, or the routed WSGI app's response.
|
||||
"""
|
||||
match = req.environ['wsgiorg.routing_args'][1]
|
||||
if not match:
|
||||
implemented_http_methods = ['GET', 'HEAD', 'POST', 'PUT',
|
||||
'DELETE', 'PATCH']
|
||||
if req.environ['REQUEST_METHOD'] not in implemented_http_methods:
|
||||
return webob.exc.HTTPNotImplemented()
|
||||
else:
|
||||
return webob.exc.HTTPNotFound()
|
||||
app = match['controller']
|
||||
return app
|
||||
|
||||
|
||||
class Request(webob.Request):
|
||||
"""Add some OpenStack API-specific logic to the base webob.Request."""
|
||||
|
||||
def best_match_content_type(self):
|
||||
"""Determine the requested response content-type."""
|
||||
supported = ('application/json',)
|
||||
bm = self.accept.best_match(supported)
|
||||
return bm or 'application/json'
|
||||
|
||||
def best_match_language(self):
|
||||
"""Determines best available locale from the Accept-Language header.
|
||||
|
||||
:returns: the best language match or None if the 'Accept-Language'
|
||||
header was not available in the request.
|
||||
"""
|
||||
if not self.accept_language:
|
||||
return None
|
||||
langs = i18n.get_available_languages('glare')
|
||||
return self.accept_language.best_match(langs)
|
||||
|
||||
def get_content_range(self):
|
||||
"""Return the `Range` in a request."""
|
||||
range_str = self.headers.get('Content-Range')
|
||||
if range_str is not None:
|
||||
range_ = webob.byterange.ContentRange.parse(range_str)
|
||||
if range_ is None:
|
||||
msg = _('Malformed Content-Range header: %s') % range_str
|
||||
raise webob.exc.HTTPBadRequest(explanation=msg)
|
||||
return range_
|
||||
|
||||
|
||||
class JSONRequestDeserializer(object):
|
||||
valid_transfer_encoding = frozenset(['chunked', 'compress', 'deflate',
|
||||
'gzip', 'identity'])
|
||||
|
||||
httpverb_may_have_body = frozenset({'POST', 'PUT', 'PATCH'})
|
||||
|
||||
@classmethod
|
||||
def is_valid_encoding(cls, request):
|
||||
request_encoding = request.headers.get('transfer-encoding', '').lower()
|
||||
return request_encoding in cls.valid_transfer_encoding
|
||||
|
||||
@classmethod
|
||||
def is_valid_method(cls, request):
|
||||
return request.method.upper() in cls.httpverb_may_have_body
|
||||
|
||||
def has_body(self, request):
|
||||
"""Returns whether a Webob.Request object will possess an entity body.
|
||||
|
||||
:param request: Webob.Request object
|
||||
"""
|
||||
|
||||
if self.is_valid_encoding(request) and self.is_valid_method(request):
|
||||
request.is_body_readable = True
|
||||
return True
|
||||
|
||||
if request.content_length is not None and request.content_length > 0:
|
||||
return True
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def _sanitizer(obj):
|
||||
"""Sanitizer method that will be passed to jsonutils.loads."""
|
||||
return obj
|
||||
|
||||
def from_json(self, datastring):
|
||||
try:
|
||||
jsondata = jsonutils.loads(datastring, object_hook=self._sanitizer)
|
||||
if not isinstance(jsondata, (dict, list)):
|
||||
msg = _('Unexpected body type. Expected list/dict.')
|
||||
raise webob.exc.HTTPBadRequest(explanation=msg)
|
||||
return jsondata
|
||||
except ValueError:
|
||||
msg = _('Malformed JSON in request body.')
|
||||
raise webob.exc.HTTPBadRequest(explanation=msg)
|
||||
|
||||
def default(self, request):
|
||||
if self.has_body(request):
|
||||
return {'body': self.from_json(request.body)}
|
||||
else:
|
||||
return {}
|
||||
|
||||
|
||||
class JSONResponseSerializer(object):
|
||||
|
||||
def _sanitizer(self, obj):
|
||||
"""Sanitizer method that will be passed to jsonutils.dumps."""
|
||||
if hasattr(obj, "to_dict"):
|
||||
return obj.to_dict()
|
||||
if isinstance(obj, multidict.MultiDict):
|
||||
return obj.mixed()
|
||||
return jsonutils.to_primitive(obj)
|
||||
|
||||
def to_json(self, data):
|
||||
return jsonutils.dump_as_bytes(data, default=self._sanitizer)
|
||||
|
||||
def default(self, response, result):
|
||||
response.content_type = 'application/json'
|
||||
body = self.to_json(result)
|
||||
body = encodeutils.to_utf8(body)
|
||||
response.body = body
|
||||
|
||||
|
||||
def translate_exception(req, e):
|
||||
"""Translates all translatable elements of the given exception."""
|
||||
|
||||
# The RequestClass attribute in the webob.dec.wsgify decorator
|
||||
# does not guarantee that the request object will be a particular
|
||||
# type; this check is therefore necessary.
|
||||
if not hasattr(req, "best_match_language"):
|
||||
return e
|
||||
|
||||
locale = req.best_match_language()
|
||||
|
||||
if isinstance(e, webob.exc.HTTPError):
|
||||
e.explanation = i18n.translate(e.explanation, locale)
|
||||
e.detail = i18n.translate(e.detail, locale)
|
||||
if getattr(e, 'body_template', None):
|
||||
e.body_template = i18n.translate(e.body_template, locale)
|
||||
return e
|
||||
|
||||
|
||||
class Resource(object):
|
||||
"""WSGI app that handles (de)serialization and controller dispatch.
|
||||
|
||||
Reads routing information supplied by RoutesMiddleware and calls
|
||||
the requested action method upon its deserializer, controller,
|
||||
and serializer. Those three objects may implement any of the basic
|
||||
controller action methods (create, update, show, index, delete)
|
||||
along with any that may be specified in the api router. A 'default'
|
||||
method may also be implemented to be used in place of any
|
||||
non-implemented actions. Deserializer methods must accept a request
|
||||
argument and return a dictionary. Controller methods must accept a
|
||||
request argument. Additionally, they must also accept keyword
|
||||
arguments that represent the keys returned by the Deserializer. They
|
||||
may raise a webob.exc exception or return a dict, which will be
|
||||
serialized by requested content type.
|
||||
"""
|
||||
|
||||
def __init__(self, controller, deserializer=None, serializer=None):
|
||||
"""
|
||||
:param controller: object that implement methods created by routes lib
|
||||
:param deserializer: object that supports webob request deserialization
|
||||
through controller-like actions
|
||||
:param serializer: object that supports webob response serialization
|
||||
through controller-like actions
|
||||
"""
|
||||
self.controller = controller
|
||||
self.serializer = serializer or JSONResponseSerializer()
|
||||
self.deserializer = deserializer or JSONRequestDeserializer()
|
||||
|
||||
@webob.dec.wsgify(RequestClass=Request)
|
||||
def __call__(self, request):
|
||||
"""WSGI method that controls (de)serialization and method dispatch."""
|
||||
action_args = self.get_action_args(request.environ)
|
||||
action = action_args.pop('action', None)
|
||||
body_reject = strutils.bool_from_string(
|
||||
action_args.pop('body_reject', None))
|
||||
|
||||
try:
|
||||
if body_reject and self.deserializer.has_body(request):
|
||||
msg = _('A body is not expected with this request.')
|
||||
raise webob.exc.HTTPBadRequest(explanation=msg)
|
||||
deserialized_request = self.dispatch(self.deserializer,
|
||||
action, request)
|
||||
action_args.update(deserialized_request)
|
||||
action_result = self.dispatch(self.controller, action,
|
||||
request, **action_args)
|
||||
except webob.exc.WSGIHTTPException as e:
|
||||
exc_info = sys.exc_info()
|
||||
e = translate_exception(request, e)
|
||||
six.reraise(type(e), e, exc_info[2])
|
||||
except glare_exc.GlareException:
|
||||
raise
|
||||
except UnicodeDecodeError:
|
||||
msg = _("Error decoding your request. Either the URL or the "
|
||||
"request body contained characters that could not be "
|
||||
"decoded by Glare")
|
||||
raise webob.exc.HTTPBadRequest(explanation=msg)
|
||||
except Exception as e:
|
||||
LOG.exception("Caught error: %s",
|
||||
encodeutils.exception_to_unicode(e))
|
||||
response = webob.exc.HTTPInternalServerError(explanation=str(e))
|
||||
return response
|
||||
|
||||
try:
|
||||
response = webob.Response(request=request)
|
||||
self.dispatch(self.serializer, action, response, action_result)
|
||||
# encode all headers in response to utf-8 to prevent unicode errors
|
||||
for name, value in list(response.headers.items()):
|
||||
if six.PY2 and isinstance(value, six.text_type):
|
||||
response.headers[name] = encodeutils.safe_encode(value)
|
||||
return response
|
||||
except webob.exc.WSGIHTTPException as e:
|
||||
return translate_exception(request, e)
|
||||
except webob.exc.HTTPException as e:
|
||||
return e
|
||||
except glare_exc.GlareException:
|
||||
raise
|
||||
# return unserializable result (typically a webob exc)
|
||||
except Exception:
|
||||
return action_result
|
||||
|
||||
def dispatch(self, obj, action, *args, **kwargs):
|
||||
"""Find action-specific method on self and call it."""
|
||||
try:
|
||||
method = getattr(obj, action)
|
||||
except AttributeError:
|
||||
method = getattr(obj, 'default')
|
||||
|
||||
return method(*args, **kwargs)
|
||||
|
||||
def get_action_args(self, request_environment):
|
||||
"""Parse dictionary created by routes library."""
|
||||
try:
|
||||
args = request_environment['wsgiorg.routing_args'][1].copy()
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
args.pop("controller", None)
|
||||
args.pop("format", None)
|
||||
|
||||
return args
|
||||
@@ -1,133 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Database API for all artifact types"""
|
||||
|
||||
from oslo_db import exception as db_exception
|
||||
from oslo_log import log as logging
|
||||
from retrying import retry
|
||||
|
||||
from glare.db.sqlalchemy import api
|
||||
from glare import locking
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _retry_on_connection_error(exc):
|
||||
"""Function to retry a DB API call if connection error was received."""
|
||||
|
||||
if isinstance(exc, db_exception.DBConnectionError):
|
||||
LOG.warning("Connection error detected. Retrying...")
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class ArtifactAPI(object):
|
||||
|
||||
def _serialize_values(self, values):
|
||||
new_values = {}
|
||||
if 'tags' in values:
|
||||
new_values['tags'] = values.pop('tags') if values['tags'] else []
|
||||
for key, value in values.items():
|
||||
if key in api.BASE_ARTIFACT_PROPERTIES:
|
||||
new_values[key] = value
|
||||
else:
|
||||
new_values.setdefault('properties', {})[key] = value
|
||||
return new_values
|
||||
|
||||
@retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000,
|
||||
stop_max_attempt_number=20)
|
||||
def save(self, context, artifact_id, values):
|
||||
"""Save artifact values in database
|
||||
|
||||
:param artifact_id: id of artifact that needs to be updated
|
||||
:param context: user context
|
||||
:param values: values that needs to be updated
|
||||
:return: dict of updated artifact values
|
||||
"""
|
||||
session = api.get_session()
|
||||
return api.create_or_update(
|
||||
context, artifact_id, self._serialize_values(values), session)
|
||||
|
||||
@retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000,
|
||||
stop_max_attempt_number=20)
|
||||
def update_blob(self, context, artifact_id, values):
|
||||
"""Create and update blob records in db
|
||||
|
||||
:param artifact_id: id of artifact that needs to be updated
|
||||
:param context: user context
|
||||
:param values: blob values that needs to be updated
|
||||
:return: dict of updated artifact values
|
||||
"""
|
||||
session = api.get_session()
|
||||
return api.create_or_update(
|
||||
context, artifact_id, {'blobs': values}, session)
|
||||
|
||||
@retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000,
|
||||
stop_max_attempt_number=20)
|
||||
def delete(self, context, artifact_id):
|
||||
"""Delete artifacts from db
|
||||
|
||||
:param context: user context
|
||||
:param artifact_id: id of artifact that needs to be deleted
|
||||
"""
|
||||
session = api.get_session()
|
||||
api.delete(context, artifact_id, session)
|
||||
|
||||
@retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000,
|
||||
stop_max_attempt_number=20)
|
||||
def get(self, context, artifact_id):
|
||||
"""Return artifact values from database
|
||||
|
||||
:param context: user context
|
||||
:param artifact_id: id of the artifact
|
||||
:return: dict of artifact values
|
||||
"""
|
||||
session = api.get_session()
|
||||
return api.get(context, artifact_id, session)
|
||||
|
||||
@retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000,
|
||||
stop_max_attempt_number=20)
|
||||
def list(self, context, filters, marker, limit, sort, latest):
|
||||
"""List artifacts from db
|
||||
|
||||
:param context: user request context
|
||||
:param filters: filter conditions from url
|
||||
:param marker: id of first artifact where we need to start
|
||||
artifact lookup
|
||||
:param limit: max number of items in list
|
||||
:param sort: sort conditions
|
||||
:param latest: flag that indicates, that only artifacts with highest
|
||||
versions should be returned in output
|
||||
:return: list of artifacts. Each artifact is represented as dict of
|
||||
values.
|
||||
"""
|
||||
session = api.get_session()
|
||||
return api.get_all(context=context, session=session, filters=filters,
|
||||
marker=marker, limit=limit, sort=sort,
|
||||
latest=latest)
|
||||
|
||||
|
||||
class ArtifactLockApi(locking.LockApiBase):
|
||||
@retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000,
|
||||
stop_max_attempt_number=20)
|
||||
def create_lock(self, context, lock_key):
|
||||
session = api.get_session()
|
||||
return api.create_lock(context, lock_key, session)
|
||||
|
||||
@retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000,
|
||||
stop_max_attempt_number=20)
|
||||
def delete_lock(self, context, lock_id):
|
||||
session = api.get_session()
|
||||
api.delete_lock(context, lock_id, session)
|
||||
@@ -1,54 +0,0 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = glare/db/migration/alembic_migrations
|
||||
|
||||
# template used to generate migration files
|
||||
# file_template = %%(rev)s_%%(slug)s
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
#truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
sqlalchemy.url =
|
||||
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
@@ -1,15 +0,0 @@
|
||||
Please see https://alembic.readthedocs.org/en/latest/index.html for general documentation
|
||||
|
||||
To create alembic migrations use:
|
||||
$ glare-db-manage revision --message --autogenerate
|
||||
|
||||
Stamp db with most recent migration version, without actually running migrations
|
||||
$ glare-db-manage stamp --revision head
|
||||
|
||||
Upgrade can be performed by:
|
||||
$ glare-db-manage upgrade
|
||||
$ glare-db-manage upgrade --revision head
|
||||
|
||||
Downgrading db:
|
||||
$ glare-db-manage downgrade
|
||||
$ glare-db-manage downgrade --revision base
|
||||
@@ -1,45 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from alembic import context
|
||||
|
||||
from glare.db.sqlalchemy import api
|
||||
from glare.db.sqlalchemy import models
|
||||
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# from myapp import mymodel
|
||||
target_metadata = models.BASE.metadata
|
||||
|
||||
|
||||
def run_migrations_online():
|
||||
"""Run migrations in 'online' mode.
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
engine = api.get_engine()
|
||||
|
||||
with engine.connect() as connection:
|
||||
context.configure(connection=connection,
|
||||
target_metadata=target_metadata)
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
run_migrations_online()
|
||||
@@ -1,37 +0,0 @@
|
||||
# Copyright ${create_date.year} OpenStack Foundation.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
def upgrade():
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade():
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -1,167 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Initial version
|
||||
|
||||
Revision ID: 001
|
||||
Revises: None
|
||||
Create Date: 2016-08-18 12:28:37.372366
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '001'
|
||||
down_revision = None
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
MYSQL_ENGINE = 'InnoDB'
|
||||
MYSQL_CHARSET = 'utf8'
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.create_table(
|
||||
'glare_artifacts',
|
||||
sa.Column('id', sa.String(36), primary_key=True, nullable=False),
|
||||
sa.Column('name', sa.String(255), nullable=False),
|
||||
sa.Column('type_name', sa.String(255), nullable=False),
|
||||
sa.Column('version_prefix', sa.BigInteger(), nullable=False),
|
||||
sa.Column('version_suffix', sa.String(255)),
|
||||
sa.Column('version_meta', sa.String(255)),
|
||||
sa.Column('description', sa.Text()),
|
||||
sa.Column('visibility', sa.String(32), nullable=False),
|
||||
sa.Column('status', sa.String(32), nullable=False),
|
||||
sa.Column('owner', sa.String(255)),
|
||||
sa.Column('created_at', sa.DateTime(), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(), nullable=False),
|
||||
sa.Column('activated_at', sa.DateTime()),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_engine=MYSQL_ENGINE,
|
||||
mysql_charset=MYSQL_CHARSET
|
||||
)
|
||||
|
||||
op.create_index('ix_glare_artifact_name_and_version',
|
||||
'glare_artifacts',
|
||||
['name', 'version_prefix', 'version_suffix']
|
||||
)
|
||||
op.create_index('ix_glare_artifact_type',
|
||||
'glare_artifacts',
|
||||
['type_name']
|
||||
)
|
||||
op.create_index('ix_glare_artifact_status',
|
||||
'glare_artifacts',
|
||||
['status']
|
||||
)
|
||||
op.create_index('ix_glare_artifact_owner',
|
||||
'glare_artifacts',
|
||||
['owner']
|
||||
)
|
||||
op.create_index('ix_glare_artifact_visibility',
|
||||
'glare_artifacts',
|
||||
['visibility']
|
||||
)
|
||||
|
||||
op.create_table(
|
||||
'glare_artifact_tags',
|
||||
sa.Column('id', sa.String(36), primary_key=True, nullable=False),
|
||||
sa.Column('artifact_id', sa.String(36),
|
||||
sa.ForeignKey('glare_artifacts.id'), nullable=False),
|
||||
sa.Column('value', sa.String(255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_engine=MYSQL_ENGINE,
|
||||
mysql_charset=MYSQL_CHARSET
|
||||
)
|
||||
|
||||
op.create_index('ix_glare_artifact_tags_artifact_id',
|
||||
'glare_artifact_tags',
|
||||
['artifact_id']
|
||||
)
|
||||
op.create_index('ix_glare_artifact_tags_artifact_id_tag_value',
|
||||
'glare_artifact_tags',
|
||||
['artifact_id', 'value']
|
||||
)
|
||||
|
||||
op.create_table(
|
||||
'glare_artifact_blobs',
|
||||
sa.Column('id', sa.String(36), primary_key=True, nullable=False),
|
||||
sa.Column('artifact_id', sa.String(36),
|
||||
sa.ForeignKey('glare_artifacts.id'), nullable=False),
|
||||
sa.Column('size', sa.BigInteger()),
|
||||
sa.Column('md5', sa.String(32)),
|
||||
sa.Column('sha1', sa.String(40)),
|
||||
sa.Column('sha256', sa.String(64)),
|
||||
sa.Column('name', sa.String(255), nullable=False),
|
||||
sa.Column('status', sa.String(32), nullable=False),
|
||||
sa.Column('external', sa.Boolean()),
|
||||
sa.Column('url', sa.Text()),
|
||||
sa.Column('key_name', sa.String(255)),
|
||||
sa.Column('content_type', sa.String(255)),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_engine=MYSQL_ENGINE,
|
||||
mysql_charset=MYSQL_CHARSET
|
||||
)
|
||||
|
||||
op.create_index('ix_glare_artifact_blobs_artifact_id',
|
||||
'glare_artifact_blobs',
|
||||
['artifact_id']
|
||||
)
|
||||
op.create_index('ix_glare_artifact_blobs_name',
|
||||
'glare_artifact_blobs',
|
||||
['name']
|
||||
)
|
||||
|
||||
op.create_table(
|
||||
'glare_artifact_properties',
|
||||
sa.Column('id', sa.String(36), primary_key=True, nullable=False),
|
||||
sa.Column('artifact_id', sa.String(36),
|
||||
sa.ForeignKey('glare_artifacts.id'), nullable=False),
|
||||
sa.Column('name', sa.String(255), nullable=False),
|
||||
sa.Column('string_value', sa.String(20000)),
|
||||
sa.Column('int_value', sa.Integer()),
|
||||
sa.Column('numeric_value', sa.Numeric()),
|
||||
sa.Column('bool_value', sa.Boolean()),
|
||||
sa.Column('position', sa.Integer()),
|
||||
sa.Column('key_name', sa.String(255)),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_engine=MYSQL_ENGINE,
|
||||
mysql_charset=MYSQL_CHARSET
|
||||
)
|
||||
|
||||
op.create_index('ix_glare_artifact_properties_artifact_id',
|
||||
'glare_artifact_properties',
|
||||
['artifact_id']
|
||||
)
|
||||
op.create_index('ix_glare_artifact_properties_name',
|
||||
'glare_artifact_properties',
|
||||
['name']
|
||||
)
|
||||
|
||||
op.create_table(
|
||||
'glare_artifact_locks',
|
||||
sa.Column('id', sa.String(255), primary_key=True, nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_engine=MYSQL_ENGINE,
|
||||
mysql_charset=MYSQL_CHARSET
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_table('glare_artifact_locks')
|
||||
op.drop_table('glare_artifact_properties')
|
||||
op.drop_table('glare_artifact_blobs')
|
||||
op.drop_table('glare_artifact_tags')
|
||||
op.drop_table('glare_artifacts')
|
||||
|
||||
# end Alembic commands #
|
||||
@@ -1,57 +0,0 @@
|
||||
# Copyright 2016 OpenStack Foundation.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Add acquired_at column
|
||||
|
||||
Revision ID: 002
|
||||
Revises: 001
|
||||
Create Date: 2016-10-05 16:03:43.207147
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '002'
|
||||
down_revision = '001'
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
MYSQL_ENGINE = 'InnoDB'
|
||||
MYSQL_CHARSET = 'utf8'
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.drop_table('glare_artifact_locks')
|
||||
|
||||
op.create_table(
|
||||
'glare_artifact_locks',
|
||||
sa.Column('id', sa.String(255), primary_key=True, nullable=False),
|
||||
sa.Column('acquired_at', sa.DateTime(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_engine=MYSQL_ENGINE,
|
||||
mysql_charset=MYSQL_CHARSET
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_table('glare_artifact_locks')
|
||||
|
||||
op.create_table(
|
||||
'glare_artifact_locks',
|
||||
sa.Column('id', sa.String(255), primary_key=True, nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_engine=MYSQL_ENGINE,
|
||||
mysql_charset=MYSQL_CHARSET
|
||||
)
|
||||
@@ -1,54 +0,0 @@
|
||||
# Copyright 2017 OpenStack Foundation.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Add acquired_at column
|
||||
|
||||
Revision ID: 003
|
||||
Revises: 002
|
||||
Create Date: 2017-01-10 12:53:25.108149
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '003'
|
||||
down_revision = '002'
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
MYSQL_ENGINE = 'InnoDB'
|
||||
MYSQL_CHARSET = 'utf8'
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.create_table(
|
||||
'glare_blob_data',
|
||||
sa.Column('id', sa.String(255), primary_key=True, nullable=False),
|
||||
# Because of strange behavior of mysql LargeBinary is converted to
|
||||
# BLOB instead of LONGBLOB. So we have to fix it explicitly with
|
||||
# 'with_variant' call.
|
||||
sa.Column(
|
||||
'data',
|
||||
sa.LargeBinary().with_variant(mysql.LONGBLOB(), 'mysql'),
|
||||
nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_engine=MYSQL_ENGINE,
|
||||
mysql_charset=MYSQL_CHARSET
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_table('glare_blob_data')
|
||||
@@ -1,86 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
|
||||
import alembic
|
||||
from alembic import config as alembic_config
|
||||
from alembic import migration as alembic_migration
|
||||
|
||||
from glare.db.sqlalchemy import api as db_api
|
||||
|
||||
|
||||
def get_alembic_config():
|
||||
path = os.path.join(os.path.dirname(__file__), 'alembic.ini')
|
||||
|
||||
config = alembic_config.Config(path)
|
||||
config.set_main_option('script_location',
|
||||
'glare.db.migration:alembic_migrations')
|
||||
return config
|
||||
|
||||
|
||||
def version(engine=None):
|
||||
"""Returns current database version."""
|
||||
engine = engine or db_api.get_engine()
|
||||
with engine.connect() as conn:
|
||||
context = alembic_migration.MigrationContext.configure(conn)
|
||||
return context.get_current_revision()
|
||||
|
||||
|
||||
def upgrade(revision, config=None):
    """Used for upgrading database.

    :param revision: Desired database version; defaults to 'head'
        (the most recent revision) when None
    :type revision: string
    :param config: alembic config to use; a default one is built when None
    """
    revision = revision or 'head'
    config = config or get_alembic_config()

    # 'revision' was already defaulted to 'head' above, so pass it directly
    # (the original repeated "revision or 'head'" here, which was dead code).
    # Return the command result for consistency with downgrade()/stamp().
    return alembic.command.upgrade(config, revision)
|
||||
|
||||
|
||||
def downgrade(revision, config=None):
    """Used for downgrading database.

    :param revision: Desired database version; defaults to 'base' when None
    :type revision: string
    :param config: alembic config to use; a default one is built when None
    """
    revision = revision or 'base'
    config = config or get_alembic_config()
    return alembic.command.downgrade(config, revision)
|
||||
|
||||
|
||||
def stamp(revision, config=None):
    """Stamp the database with the provided revision.

    No migrations are executed; only the alembic version marker is set.

    :param revision: should match one from the repository, or 'head' to
        stamp the database with the most recent revision
    :type revision: string
    :param config: alembic config to use; a default one is built when None
    """
    if not config:
        config = get_alembic_config()
    return alembic.command.stamp(config, revision=revision)
|
||||
|
||||
|
||||
def revision(message=None, autogenerate=False, config=None):
    """Create a template for a new migration.

    :param message: text that will be used for the migration title
    :type message: string
    :param autogenerate: when True, generate a diff based on the current
        database state
    :type autogenerate: bool
    :param config: alembic config to use; a default one is built when None
    """
    if not config:
        config = get_alembic_config()
    return alembic.command.revision(config, message=message,
                                    autogenerate=autogenerate)
|
||||
@@ -1,671 +0,0 @@
|
||||
# Copyright (c) 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import operator
|
||||
import threading
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_db import exception as db_exception
|
||||
from oslo_db import options
|
||||
from oslo_db.sqlalchemy import session
|
||||
from oslo_log import log as os_logging
|
||||
from oslo_utils import timeutils
|
||||
import osprofiler.sqlalchemy
|
||||
from retrying import retry
|
||||
import six
|
||||
import sqlalchemy
|
||||
from sqlalchemy import and_
|
||||
import sqlalchemy.exc
|
||||
from sqlalchemy import exists
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy import or_
|
||||
import sqlalchemy.orm as orm
|
||||
from sqlalchemy.orm import aliased
|
||||
from sqlalchemy.orm import joinedload
|
||||
|
||||
from glare.common import exception
|
||||
from glare.common import semver_db
|
||||
from glare.common import utils
|
||||
from glare.db.sqlalchemy import models
|
||||
from glare.i18n import _
|
||||
|
||||
LOG = os_logging.getLogger(__name__)
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.import_group("profiler", "glare.common.wsgi")
|
||||
options.set_defaults(CONF)
|
||||
|
||||
|
||||
BASE_ARTIFACT_PROPERTIES = ('id', 'visibility', 'created_at', 'updated_at',
|
||||
'activated_at', 'owner', 'status', 'description',
|
||||
'name', 'type_name', 'version')
|
||||
|
||||
_FACADE = None
|
||||
_LOCK = threading.Lock()
|
||||
|
||||
|
||||
def _retry_on_deadlock(exc):
    """Retry predicate: True when *exc* is a database deadlock.

    Used as retry_on_exception with the 'retrying' decorator so that
    DB API calls are re-attempted after a deadlock.
    """
    if not isinstance(exc, db_exception.DBDeadlock):
        return False
    LOG.warning("Deadlock detected. Retrying...")
    return True
|
||||
|
||||
|
||||
def _create_facade_lazily():
    # Lazily create the module-wide EngineFacade singleton using
    # double-checked locking: the first unlocked check keeps the common
    # path lock-free, the second check inside the lock prevents two
    # threads from both building a facade.
    global _LOCK, _FACADE
    if _FACADE is None:
        with _LOCK:
            if _FACADE is None:
                _FACADE = session.EngineFacade.from_config(CONF)

                # Attach osprofiler SQL tracing only when both profiler
                # options are enabled in the configuration.
                if CONF.profiler.enabled and CONF.profiler.trace_sqlalchemy:
                    osprofiler.sqlalchemy.add_tracing(sqlalchemy,
                                                      _FACADE.get_engine(),
                                                      "db")
    return _FACADE
|
||||
|
||||
|
||||
def get_engine():
    """Return the SQLAlchemy engine from the shared facade."""
    return _create_facade_lazily().get_engine()
|
||||
|
||||
|
||||
def get_session(autocommit=True, expire_on_commit=False):
    """Return a new SQLAlchemy session from the shared facade."""
    return _create_facade_lazily().get_session(
        autocommit=autocommit, expire_on_commit=expire_on_commit)
|
||||
|
||||
|
||||
def setup_db():
    """Create the database schema for all registered glare models."""
    models.register_models(get_engine())
|
||||
|
||||
|
||||
def drop_db():
    """Drop the database schema for all registered glare models."""
    models.unregister_models(get_engine())
|
||||
|
||||
|
||||
@retry(retry_on_exception=_retry_on_deadlock, wait_fixed=500,
       stop_max_attempt_number=50)
def delete(context, artifact_id, session):
    """Delete the artifact row with the given id, retrying on deadlock."""
    with session.begin():
        query = session.query(models.Artifact).filter_by(id=artifact_id)
        query.delete()
|
||||
|
||||
|
||||
def _drop_protected_attrs(model_class, values):
|
||||
"""Removed protected attributes from values dictionary using the models
|
||||
__protected_attributes__ field.
|
||||
"""
|
||||
for attr in model_class.__protected_attributes__:
|
||||
if attr in values:
|
||||
del values[attr]
|
||||
|
||||
|
||||
@retry(retry_on_exception=_retry_on_deadlock, wait_fixed=500,
       stop_max_attempt_number=50)
@utils.no_4byte_params
def create_or_update(context, artifact_id, values, session):
    """Create a new artifact or update an existing one in one transaction.

    :param artifact_id: id of the artifact to update, or None to create a
        new artifact (in which case 'id' must be present in values)
    :param values: dict of artifact fields to set; 'tags', 'properties'
        and 'blobs' are popped and handled via their own child tables
    :returns: the saved artifact as a dict
    :raises exception.Conflict: on a status change while blobs are
        still uploading
    """
    with session.begin():
        # Protected attributes (per models.Artifact) must never be set
        # directly from the caller-supplied values.
        _drop_protected_attrs(models.Artifact, values)
        if artifact_id is None:
            # create new artifact
            artifact = models.Artifact()
            artifact.id = values.pop('id')
        else:
            # update the existing artifact
            artifact = _get(context, artifact_id, session)

        # Versions are stored in semver-decomposed form, not as a string.
        if 'version' in values:
            values['version'] = semver_db.parse(values['version'])

        # Collection-valued fields live in child tables and are converted
        # by their dedicated helpers rather than artifact.update().
        if 'tags' in values:
            tags = values.pop('tags')
            artifact.tags = _do_tags(artifact, tags)

        if 'properties' in values:
            properties = values.pop('properties', {})
            artifact.properties = _do_properties(artifact, properties)

        if 'blobs' in values:
            blobs = values.pop('blobs')
            artifact.blobs = _do_blobs(artifact, blobs)

        artifact.updated_at = timeutils.utcnow()
        if 'status' in values:
            # A status transition is refused while any blob of this
            # artifact is still in 'saving' state.
            if session.query(exists().where(and_(
                models.ArtifactBlob.status == 'saving',
                models.ArtifactBlob.artifact_id == artifact_id))
            ).one()[0]:
                raise exception.Conflict(
                    "You cannot change artifact status if it has "
                    "uploading blobs.")
            if values['status'] == 'active':
                artifact.activated_at = timeutils.utcnow()
        artifact.update(values)

        LOG.debug('Sending request to the database. '
                  'New values are %s', values)
        artifact.save(session=session)
        LOG.debug('Response from the database was received.')

        return artifact.to_dict()
|
||||
|
||||
|
||||
def _get(context, artifact_id, session):
    """Fetch the artifact model with the given id.

    :raises exception.ArtifactNotFound: if no visible artifact matches.
    """
    try:
        return _do_artifacts_query(context, session).filter_by(
            id=artifact_id).one()
    except orm.exc.NoResultFound:
        msg = _("Artifact with id=%s not found.") % artifact_id
        LOG.warning(msg)
        raise exception.ArtifactNotFound(msg)
|
||||
|
||||
|
||||
def get(context, artifact_id, session):
    """Return the artifact with the given id as a dict."""
    artifact = _get(context, artifact_id, session)
    return artifact.to_dict()
|
||||
|
||||
|
||||
def get_all(context, session, filters=None, marker=None, limit=None,
|
||||
sort=None, latest=False):
|
||||
"""List all visible artifacts
|
||||
|
||||
:param filters: dict of filter keys and values.
|
||||
:param marker: artifact id after which to start page
|
||||
:param limit: maximum number of artifacts to return
|
||||
:param sort: a tuple (key, dir, type) where key is an attribute by
|
||||
which results should be sorted, dir is a direction: 'asc' or 'desc',
|
||||
and type is type of the attribute: 'bool', 'string', 'numeric' or 'int' or
|
||||
None if attribute is base.
|
||||
:param latest: flag that indicates, that only artifacts with highest
|
||||
versions should be returned in output
|
||||