From 519b1b1b8e1d0b09dde18339a60fd6c82aeaba58 Mon Sep 17 00:00:00 2001 From: Tony Breeds Date: Tue, 12 Sep 2017 15:38:36 -0600 Subject: [PATCH] Retire Packaging Deb project repos This commit is part of a series to retire the Packaging Deb project. Step 2 is to remove all content from the project repos, replacing it with a README notification where to find ongoing work, and how to recover the repo if needed at some future point (as in https://docs.openstack.org/infra/manual/drivers.html#retiring-a-project). Change-Id: I999443d9fd240490df5a2b232e6c9c6b55a04a34 --- .gitignore | 97 - .gitreview | 4 - .testr.conf | 8 - LICENSE | 201 -- README | 14 + README.rst | 18 - babel.cfg | 1 - bandit.yaml | 245 -- devstack/README.rst | 24 - devstack/plugin.sh | 218 -- devstack/settings | 44 - doc/source/_static/.placeholder | 0 doc/source/_templates/sidebarlinks.html | 11 - doc/source/_theme/layout.css | 4 - doc/source/_theme/theme.conf | 4 - doc/source/architecture.rst | 93 - doc/source/conf.py | 257 -- .../creating_custom_artifact_type.rst | 174 -- doc/source/developer/devstack.rst | 4 - doc/source/developer/index.rst | 10 - doc/source/developer/troubleshooting.rst | 4 - doc/source/developer/webapi/index.rst | 7 - doc/source/developer/webapi/v1.rst | 1331 ---------- doc/source/guides/configuration_guide.rst | 4 - doc/source/guides/dashboard_guide.rst | 4 - doc/source/guides/glareclient_guide.rst | 85 - doc/source/guides/hooks_guide.rst | 4 - doc/source/guides/installation_guide.rst | 4 - doc/source/guides/upgrade_guide.rst | 7 - doc/source/images/glare-architecture.png | Bin 41233 -> 0 bytes .../images_src/glare-architecture.graphml | 876 ------- doc/source/index.rst | 58 - doc/source/main_features.rst | 4 - doc/source/overview.rst | 17 - doc/source/quickstart.rst | 4 - etc/glare-paste.ini | 53 - etc/glare-swift.conf.sample | 25 - etc/oslo-config-generator/glare.conf | 13 - glare/__init__.py | 0 glare/api/__init__.py | 0 glare/api/middleware/__init__.py | 0 glare/api/middleware/context.py | 158 -- glare/api/middleware/fault.py | 128 - glare/api/middleware/keycloak_auth.py | 134 - glare/api/middleware/version_negotiation.py | 125 - glare/api/v1/__init__.py | 0 glare/api/v1/api_version_request.py | 123 - glare/api/v1/api_versioning.py | 172 -- glare/api/v1/resource.py | 464 ---- glare/api/v1/router.py | 100 - glare/api/versions.py | 96 - glare/cmd/__init__.py | 53 - glare/cmd/api.py | 90 - glare/cmd/db_manage.py | 80 - glare/cmd/scrubber.py | 73 - glare/common/__init__.py | 0 glare/common/config.py | 155 -- glare/common/exception.py | 166 -- glare/common/policy.py | 128 - glare/common/semver_db.py | 173 -- glare/common/store_api.py | 114 - glare/common/utils.py | 582 ----- glare/common/wsgi.py | 834 ------ glare/db/__init__.py | 0 glare/db/artifact_api.py | 133 - glare/db/migration/__init__.py | 0 glare/db/migration/alembic.ini | 54 - glare/db/migration/alembic_migrations/README | 15 - glare/db/migration/alembic_migrations/env.py | 45 - .../alembic_migrations/script.py.mako | 37 - .../versions/001_initial_version.py | 167 -- .../versions/002_add_acquired_at_column.py | 57 - .../versions/003_add_database_blob_storage.py | 54 - glare/db/migration/migration.py | 86 - glare/db/sqlalchemy/__init__.py | 0 glare/db/sqlalchemy/api.py | 671 ----- glare/db/sqlalchemy/models.py | 271 -- glare/engine.py | 578 ----- glare/hacking/__init__.py | 0 glare/hacking/checks.py | 156 -- glare/i18n.py | 21 - glare/locking.py | 113 - glare/notification.py | 66 - glare/objects/__init__.py | 0 glare/objects/all.py | 50 - 
glare/objects/base.py | 609 ----- glare/objects/heat_environment.py | 31 - glare/objects/heat_template.py | 49 - glare/objects/image.py | 90 - glare/objects/meta/__init__.py | 18 - glare/objects/meta/fields.py | 184 -- glare/objects/meta/file_utils.py | 133 - glare/objects/meta/registry.py | 130 - glare/objects/meta/validators.py | 397 --- glare/objects/meta/wrappers.py | 267 -- glare/objects/murano_package.py | 60 - glare/objects/secret.py | 90 - glare/objects/tosca_template.py | 35 - glare/opts.py | 64 - glare/scrubber.py | 171 -- glare/store/__init__.py | 0 glare/store/base_api.py | 45 - glare/store/database.py | 33 - glare/tests/__init__.py | 0 glare/tests/etc/policy.json | 1 - glare/tests/functional/__init__.py | 688 ----- glare/tests/functional/base.py | 166 -- glare/tests/functional/test_all.py | 91 - glare/tests/functional/test_database_store.py | 172 -- .../tests/functional/test_sample_artifact.py | 2310 ----------------- glare/tests/functional/test_schemas.py | 950 ------- glare/tests/functional/test_scrubber.py | 144 - glare/tests/functional/test_visibility.py | 193 -- glare/tests/hooks_artifact.py | 118 - glare/tests/sample_artifact.py | 132 - glare/tests/unit/__init__.py | 27 - glare/tests/unit/api/__init__.py | 0 glare/tests/unit/api/test_create.py | 220 -- glare/tests/unit/api/test_delete.py | 199 -- glare/tests/unit/api/test_download.py | 136 - glare/tests/unit/api/test_list.py | 586 ----- glare/tests/unit/api/test_locations.py | 120 - glare/tests/unit/api/test_update.py | 675 ----- glare/tests/unit/api/test_upload.py | 281 -- glare/tests/unit/base.py | 167 -- glare/tests/unit/db/__init__.py | 0 glare/tests/unit/db/migrations/__init__.py | 0 .../unit/db/migrations/test_migrations.py | 258 -- glare/tests/unit/glare_fixtures.py | 40 - glare/tests/unit/middleware/__init__.py | 0 glare/tests/unit/middleware/test_context.py | 129 - glare/tests/unit/middleware/test_fault.py | 116 - .../unit/middleware/test_keycloak_auth.py | 152 -- .../unit/middleware/test_trusted_auth.py | 173 -- .../middleware/test_version_negotiations.py | 77 - glare/tests/unit/test_fixtures.py | 37 - glare/tests/unit/test_hacking.py | 154 -- glare/tests/unit/test_multistore.py | 38 - glare/tests/unit/test_utils.py | 281 -- glare/tests/unit/test_validation_hooks.py | 191 -- glare/tests/unit/test_validators.py | 368 --- glare/tests/unit/test_versions.py | 65 - glare/tests/unit/test_wsgi.py | 558 ---- glare/tests/utils.py | 376 --- glare/tests/var/certificate.crt | 92 - glare/tests/var/hooks.zip | Bin 818 -> 0 bytes glare/tests/var/privatekey.key | 51 - glare/version.py | 17 - glare/wsgi.py | 54 - glare_tempest_plugin/__init__.py | 0 glare_tempest_plugin/clients.py | 57 - glare_tempest_plugin/config.py | 38 - glare_tempest_plugin/contrib/gate_hook.sh | 14 - .../contrib/post_test_hook.sh | 14 - glare_tempest_plugin/contrib/pre_test_hook.sh | 14 - glare_tempest_plugin/plugin.py | 56 - glare_tempest_plugin/services/__init__.py | 0 .../services/artifacts/__init__.py | 0 .../services/artifacts/artifacts_client.py | 121 - glare_tempest_plugin/tests/__init__.py | 0 glare_tempest_plugin/tests/api/__init__.py | 0 glare_tempest_plugin/tests/api/base.py | 81 - .../tests/api/test_list_artifact.py | 40 - pylintrc | 27 - requirements.txt | 57 - setup.cfg | 69 - setup.py | 29 - test-requirements.txt | 38 - tools/test-setup.sh | 57 - tox.ini | 74 - 170 files changed, 14 insertions(+), 24234 deletions(-) delete mode 100644 .gitignore delete mode 100644 .gitreview delete mode 100644 .testr.conf delete mode 100644 LICENSE create 
mode 100644 README delete mode 100644 README.rst delete mode 100644 babel.cfg delete mode 100644 bandit.yaml delete mode 100644 devstack/README.rst delete mode 100644 devstack/plugin.sh delete mode 100644 devstack/settings delete mode 100644 doc/source/_static/.placeholder delete mode 100644 doc/source/_templates/sidebarlinks.html delete mode 100644 doc/source/_theme/layout.css delete mode 100644 doc/source/_theme/theme.conf delete mode 100644 doc/source/architecture.rst delete mode 100644 doc/source/conf.py delete mode 100644 doc/source/developer/creating_custom_artifact_type.rst delete mode 100644 doc/source/developer/devstack.rst delete mode 100644 doc/source/developer/index.rst delete mode 100644 doc/source/developer/troubleshooting.rst delete mode 100644 doc/source/developer/webapi/index.rst delete mode 100644 doc/source/developer/webapi/v1.rst delete mode 100644 doc/source/guides/configuration_guide.rst delete mode 100644 doc/source/guides/dashboard_guide.rst delete mode 100644 doc/source/guides/glareclient_guide.rst delete mode 100644 doc/source/guides/hooks_guide.rst delete mode 100644 doc/source/guides/installation_guide.rst delete mode 100644 doc/source/guides/upgrade_guide.rst delete mode 100644 doc/source/images/glare-architecture.png delete mode 100644 doc/source/images_src/glare-architecture.graphml delete mode 100644 doc/source/index.rst delete mode 100644 doc/source/main_features.rst delete mode 100644 doc/source/overview.rst delete mode 100644 doc/source/quickstart.rst delete mode 100644 etc/glare-paste.ini delete mode 100644 etc/glare-swift.conf.sample delete mode 100644 etc/oslo-config-generator/glare.conf delete mode 100644 glare/__init__.py delete mode 100644 glare/api/__init__.py delete mode 100644 glare/api/middleware/__init__.py delete mode 100644 glare/api/middleware/context.py delete mode 100644 glare/api/middleware/fault.py delete mode 100644 glare/api/middleware/keycloak_auth.py delete mode 100644 glare/api/middleware/version_negotiation.py delete mode 100644 glare/api/v1/__init__.py delete mode 100644 glare/api/v1/api_version_request.py delete mode 100644 glare/api/v1/api_versioning.py delete mode 100644 glare/api/v1/resource.py delete mode 100644 glare/api/v1/router.py delete mode 100644 glare/api/versions.py delete mode 100644 glare/cmd/__init__.py delete mode 100755 glare/cmd/api.py delete mode 100755 glare/cmd/db_manage.py delete mode 100644 glare/cmd/scrubber.py delete mode 100644 glare/common/__init__.py delete mode 100644 glare/common/config.py delete mode 100644 glare/common/exception.py delete mode 100644 glare/common/policy.py delete mode 100644 glare/common/semver_db.py delete mode 100644 glare/common/store_api.py delete mode 100644 glare/common/utils.py delete mode 100644 glare/common/wsgi.py delete mode 100644 glare/db/__init__.py delete mode 100644 glare/db/artifact_api.py delete mode 100644 glare/db/migration/__init__.py delete mode 100644 glare/db/migration/alembic.ini delete mode 100644 glare/db/migration/alembic_migrations/README delete mode 100644 glare/db/migration/alembic_migrations/env.py delete mode 100644 glare/db/migration/alembic_migrations/script.py.mako delete mode 100644 glare/db/migration/alembic_migrations/versions/001_initial_version.py delete mode 100644 glare/db/migration/alembic_migrations/versions/002_add_acquired_at_column.py delete mode 100644 glare/db/migration/alembic_migrations/versions/003_add_database_blob_storage.py delete mode 100644 glare/db/migration/migration.py delete mode 100644 glare/db/sqlalchemy/__init__.py 
delete mode 100644 glare/db/sqlalchemy/api.py delete mode 100644 glare/db/sqlalchemy/models.py delete mode 100644 glare/engine.py delete mode 100644 glare/hacking/__init__.py delete mode 100644 glare/hacking/checks.py delete mode 100644 glare/i18n.py delete mode 100644 glare/locking.py delete mode 100644 glare/notification.py delete mode 100644 glare/objects/__init__.py delete mode 100644 glare/objects/all.py delete mode 100644 glare/objects/base.py delete mode 100644 glare/objects/heat_environment.py delete mode 100644 glare/objects/heat_template.py delete mode 100644 glare/objects/image.py delete mode 100644 glare/objects/meta/__init__.py delete mode 100644 glare/objects/meta/fields.py delete mode 100644 glare/objects/meta/file_utils.py delete mode 100644 glare/objects/meta/registry.py delete mode 100644 glare/objects/meta/validators.py delete mode 100644 glare/objects/meta/wrappers.py delete mode 100644 glare/objects/murano_package.py delete mode 100644 glare/objects/secret.py delete mode 100644 glare/objects/tosca_template.py delete mode 100644 glare/opts.py delete mode 100644 glare/scrubber.py delete mode 100644 glare/store/__init__.py delete mode 100644 glare/store/base_api.py delete mode 100644 glare/store/database.py delete mode 100644 glare/tests/__init__.py delete mode 100644 glare/tests/etc/policy.json delete mode 100644 glare/tests/functional/__init__.py delete mode 100644 glare/tests/functional/base.py delete mode 100644 glare/tests/functional/test_all.py delete mode 100644 glare/tests/functional/test_database_store.py delete mode 100644 glare/tests/functional/test_sample_artifact.py delete mode 100644 glare/tests/functional/test_schemas.py delete mode 100644 glare/tests/functional/test_scrubber.py delete mode 100644 glare/tests/functional/test_visibility.py delete mode 100644 glare/tests/hooks_artifact.py delete mode 100644 glare/tests/sample_artifact.py delete mode 100644 glare/tests/unit/__init__.py delete mode 100644 glare/tests/unit/api/__init__.py delete mode 100644 glare/tests/unit/api/test_create.py delete mode 100644 glare/tests/unit/api/test_delete.py delete mode 100644 glare/tests/unit/api/test_download.py delete mode 100644 glare/tests/unit/api/test_list.py delete mode 100644 glare/tests/unit/api/test_locations.py delete mode 100644 glare/tests/unit/api/test_update.py delete mode 100644 glare/tests/unit/api/test_upload.py delete mode 100644 glare/tests/unit/base.py delete mode 100644 glare/tests/unit/db/__init__.py delete mode 100644 glare/tests/unit/db/migrations/__init__.py delete mode 100644 glare/tests/unit/db/migrations/test_migrations.py delete mode 100644 glare/tests/unit/glare_fixtures.py delete mode 100644 glare/tests/unit/middleware/__init__.py delete mode 100644 glare/tests/unit/middleware/test_context.py delete mode 100644 glare/tests/unit/middleware/test_fault.py delete mode 100644 glare/tests/unit/middleware/test_keycloak_auth.py delete mode 100644 glare/tests/unit/middleware/test_trusted_auth.py delete mode 100644 glare/tests/unit/middleware/test_version_negotiations.py delete mode 100644 glare/tests/unit/test_fixtures.py delete mode 100644 glare/tests/unit/test_hacking.py delete mode 100644 glare/tests/unit/test_multistore.py delete mode 100644 glare/tests/unit/test_utils.py delete mode 100644 glare/tests/unit/test_validation_hooks.py delete mode 100644 glare/tests/unit/test_validators.py delete mode 100644 glare/tests/unit/test_versions.py delete mode 100644 glare/tests/unit/test_wsgi.py delete mode 100644 glare/tests/utils.py delete mode 100644 
glare/tests/var/certificate.crt delete mode 100644 glare/tests/var/hooks.zip delete mode 100644 glare/tests/var/privatekey.key delete mode 100644 glare/version.py delete mode 100644 glare/wsgi.py delete mode 100644 glare_tempest_plugin/__init__.py delete mode 100644 glare_tempest_plugin/clients.py delete mode 100644 glare_tempest_plugin/config.py delete mode 100644 glare_tempest_plugin/contrib/gate_hook.sh delete mode 100644 glare_tempest_plugin/contrib/post_test_hook.sh delete mode 100644 glare_tempest_plugin/contrib/pre_test_hook.sh delete mode 100644 glare_tempest_plugin/plugin.py delete mode 100644 glare_tempest_plugin/services/__init__.py delete mode 100644 glare_tempest_plugin/services/artifacts/__init__.py delete mode 100644 glare_tempest_plugin/services/artifacts/artifacts_client.py delete mode 100644 glare_tempest_plugin/tests/__init__.py delete mode 100644 glare_tempest_plugin/tests/api/__init__.py delete mode 100644 glare_tempest_plugin/tests/api/base.py delete mode 100644 glare_tempest_plugin/tests/api/test_list_artifact.py delete mode 100644 pylintrc delete mode 100644 requirements.txt delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 test-requirements.txt delete mode 100755 tools/test-setup.sh delete mode 100644 tox.ini diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 52daa4a..0000000 --- a/.gitignore +++ /dev/null @@ -1,97 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -.installed.cfg -*.egg* - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. 
-*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.testrepository/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*,cover -.hypothesis/ -ChangeLog - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# IPython Notebook -.ipynb_checkpoints - -# pyenv -.python-version - -# celery beat schedule file -celerybeat-schedule - -# dotenv -.env - -# virtualenv -venv/ -ENV/ - -# Spyder project settings -.spyderproject - -# Rope project settings -.ropeproject - -# IDE files -.idea - -# Files created by doc build -AUTHORS -ChangeLog -doc/source/api diff --git a/.gitreview b/.gitreview deleted file mode 100644 index 28860b4..0000000 --- a/.gitreview +++ /dev/null @@ -1,4 +0,0 @@ -[gerrit] -host=review.openstack.org -port=29418 -project=openstack/glare.git diff --git a/.testr.conf b/.testr.conf deleted file mode 100644 index 4663d01..0000000 --- a/.testr.conf +++ /dev/null @@ -1,8 +0,0 @@ -[DEFAULT] -test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \ - OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \ - OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-160} \ - ${PYTHON:-python} -m subunit.run discover -t ./ ./glare/tests $LISTOPT $IDOPTION - -test_id_option=--load-list $IDFILE -test_list_option=--list diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 8dada3e..0000000 --- a/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/README b/README new file mode 100644 index 0000000..8fcd2b2 --- /dev/null +++ b/README @@ -0,0 +1,14 @@ +This project is no longer maintained. + +The contents of this repository are still available in the Git +source code management system. To see the contents of this +repository before it reached its end of life, please check out the +previous commit with "git checkout HEAD^1". + +For ongoing work on maintaining OpenStack packages in the Debian +distribution, please see the Debian OpenStack packaging team at +https://wiki.debian.org/OpenStack/. + +For any further questions, please email +openstack-dev@lists.openstack.org or join #openstack-dev on +Freenode. 
diff --git a/README.rst b/README.rst deleted file mode 100644 index 8abb6fd..0000000 --- a/README.rst +++ /dev/null @@ -1,18 +0,0 @@ -Glare -===== - -Glare (from GLare Artifact REpository) is a service that provides access to a -unified catalog of structured meta-information as well as related binary data -(these structures are also called 'artifacts'). - -* Get Started: https://github.com/openstack/glare/blob/master/doc/source/quickstart.rst -* Documentation: https://github.com/openstack/glare/blob/master/doc -* Source: https://git.openstack.org/cgit/openstack/glare -* Bugs: https://bugs.launchpad.net/glare -* Blueprints: https://blueprints.launchpad.net/glare -* REST Client: https://git.openstack.org/cgit/openstack/python-glareclient - -License ------- - -Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0 diff --git a/babel.cfg b/babel.cfg deleted file mode 100644 index efceab8..0000000 --- a/babel.cfg +++ /dev/null @@ -1 +0,0 @@ -[python: **.py] diff --git a/bandit.yaml b/bandit.yaml deleted file mode 100644 index 2e7b187..0000000 --- a/bandit.yaml +++ /dev/null @@ -1,245 +0,0 @@ -# optional: after how many files to update progress -#show_progress_every: 100 - -# optional: plugins directory name -#plugins_dir: 'plugins' - -# optional: plugins discovery name pattern -plugin_name_pattern: '*.py' - -# optional: terminal escape sequences to display colors -#output_colors: -# DEFAULT: '\033[0m' -# HEADER: '\033[95m' -# LOW: '\033[94m' -# MEDIUM: '\033[93m' -# HIGH: '\033[91m' - -# optional: log format string -#log_format: "[%(module)s]\t%(levelname)s\t%(message)s" - -# globs of files which should be analyzed -include: - - '*.py' - - '*.pyw' - -# a list of strings, which if found in the path will cause files to be excluded -# for example /tests/ - to remove all files in tests directory -exclude_dirs: - - '/tests/' - -profiles: - gate: - include: - - - any_other_function_with_shell_equals_true - - assert_used - - blacklist_calls - - blacklist_import_func - - # One of the blacklisted imports is the subprocess module. Keystone - # has to import the subprocess module in a single module for - # eventlet support so in most cases bandit won't be able to detect - # that subprocess is even being imported. Also, Bandit's - # recommendation is just to check that the use is safe without any - # documentation on what safe or unsafe usage is. So this test is - # skipped.
- # - blacklist_imports - - exec_used - - execute_with_run_as_root_equals_true - - # - hardcoded_bind_all_interfaces # TODO: enable this test - - # Not working because wordlist/default-passwords file not bundled, - # see https://bugs.launchpad.net/bandit/+bug/1451575 : - # - hardcoded_password - - # Not used because it's prone to false positives: - # - hardcoded_sql_expressions - - # - hardcoded_tmp_directory # TODO: enable this test - - - jinja2_autoescape_false - - - linux_commands_wildcard_injection - - - paramiko_calls - - - password_config_option_not_marked_secret - - request_with_no_cert_validation - - set_bad_file_permissions - - subprocess_popen_with_shell_equals_true - # - subprocess_without_shell_equals_true # TODO: enable this test - - start_process_with_a_shell - # - start_process_with_no_shell # TODO: enable this test - - start_process_with_partial_path - - ssl_with_bad_defaults - - ssl_with_bad_version - - ssl_with_no_version - # - try_except_pass # TODO: enable this test - - - use_of_mako_templates - -blacklist_calls: - bad_name_sets: - # - pickle: - # qualnames: [pickle.loads, pickle.load, pickle.Unpickler, - # cPickle.loads, cPickle.load, cPickle.Unpickler] - # message: "Pickle library appears to be in use, possible security issue." - # TODO: enable this test - - marshal: - qualnames: [marshal.load, marshal.loads] - message: "Deserialization with the marshal module is possibly dangerous." - # - md5: - # qualnames: [hashlib.md5, Crypto.Hash.MD2.new, Crypto.Hash.MD4.new, Crypto.Hash.MD5.new, cryptography.hazmat.primitives.hashes.MD5] - # message: "Use of insecure MD2, MD4, or MD5 hash function." - # TODO: enable this test - - mktemp_q: - qualnames: [tempfile.mktemp] - message: "Use of insecure and deprecated function (mktemp)." - - eval: - qualnames: [eval] - message: "Use of possibly insecure function - consider using safer ast.literal_eval." - - mark_safe: - names: [mark_safe] - message: "Use of mark_safe() may expose cross-site scripting vulnerabilities and should be reviewed." - - httpsconnection: - qualnames: [httplib.HTTPSConnection] - message: "Use of HTTPSConnection does not provide security, see https://wiki.openstack.org/wiki/OSSN/OSSN-0033" - - yaml_load: - qualnames: [yaml.load] - message: "Use of unsafe yaml load. Allows instantiation of arbitrary objects. Consider yaml.safe_load()." - - urllib_urlopen: - qualnames: [urllib.urlopen, urllib.urlretrieve, urllib.URLopener, urllib.FancyURLopener, urllib2.urlopen, urllib2.Request] - message: "Audit url open for permitted schemes. Allowing use of file:/ or custom schemes is often unexpected." - - random: - qualnames: [random.random, random.randrange, random.randint, random.choice, random.uniform, random.triangular] - message: "Standard pseudo-random generators are not suitable for security/cryptographic purposes." - level: "LOW" - - # Most of this is based off of Christian Heimes' work on defusedxml: - # https://pypi.python.org/pypi/defusedxml/#defusedxml-sax - - # TODO(jaegerandi): Enable once defusedxml is in global requirements. - #- xml_bad_cElementTree: - # qualnames: [xml.etree.cElementTree.parse, - # xml.etree.cElementTree.iterparse, - # xml.etree.cElementTree.fromstring, - # xml.etree.cElementTree.XMLParser] - # message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with its defusedxml equivalent function."
-#- xml_bad_ElementTree: - # qualnames: [xml.etree.ElementTree.parse, - # xml.etree.ElementTree.iterparse, - # xml.etree.ElementTree.fromstring, - # xml.etree.ElementTree.XMLParser] - # message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with its defusedxml equivalent function." - - xml_bad_expatreader: - qualnames: [xml.sax.expatreader.create_parser] - message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with its defusedxml equivalent function." - - xml_bad_expatbuilder: - qualnames: [xml.dom.expatbuilder.parse, - xml.dom.expatbuilder.parseString] - message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with its defusedxml equivalent function." - - xml_bad_sax: - qualnames: [xml.sax.parse, - xml.sax.parseString, - xml.sax.make_parser] - message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with its defusedxml equivalent function." - - xml_bad_minidom: - qualnames: [xml.dom.minidom.parse, - xml.dom.minidom.parseString] - message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with its defusedxml equivalent function." - - xml_bad_pulldom: - qualnames: [xml.dom.pulldom.parse, - xml.dom.pulldom.parseString] - message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with its defusedxml equivalent function." - - xml_bad_etree: - qualnames: [lxml.etree.parse, - lxml.etree.fromstring, - lxml.etree.RestrictedElement, - lxml.etree.GlobalParserTLS, - lxml.etree.getDefaultParser, - lxml.etree.check_docinfo] - message: "Using {func} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {func} with its defusedxml equivalent function." - - -shell_injection: - # Start a process using the subprocess module, or one of its wrappers. - subprocess: [subprocess.Popen, subprocess.call, subprocess.check_call, - subprocess.check_output, utils.execute, utils.execute_with_timeout] - # Start a process with a function vulnerable to shell injection. - shell: [os.system, os.popen, os.popen2, os.popen3, os.popen4, - popen2.popen2, popen2.popen3, popen2.popen4, popen2.Popen3, - popen2.Popen4, commands.getoutput, commands.getstatusoutput] - # Start a process with a function that is not vulnerable to shell injection. - no_shell: [os.execl, os.execle, os.execlp, os.execlpe, os.execv, os.execve, - os.execvp, os.execvpe, os.spawnl, os.spawnle, os.spawnlp, - os.spawnlpe, os.spawnv, os.spawnve, os.spawnvp, os.spawnvpe, - os.startfile] - -blacklist_imports: - bad_import_sets: - - telnet: - imports: [telnetlib] - level: HIGH - message: "Telnet is considered insecure. Use SSH or some other encrypted protocol." - - info_libs: - imports: [pickle, cPickle, subprocess, Crypto] - level: LOW - message: "Consider possible security implications associated with {module} module." - - # Most of this is based off of Christian Heimes' work on defusedxml: - # https://pypi.python.org/pypi/defusedxml/#defusedxml-sax - - - xml_libs: - imports: [xml.etree.cElementTree, - xml.etree.ElementTree, - xml.sax.expatreader, - xml.sax, - xml.dom.expatbuilder, - xml.dom.minidom, - xml.dom.pulldom, - lxml.etree, - lxml] - message: "Using {module} to parse untrusted XML data is known to be vulnerable to XML attacks. Replace {module} with the equivalent defusedxml package."
- level: LOW - - xml_libs_high: - imports: [xmlrpclib] - message: "Using {module} to parse untrusted XML data is known to be vulnerable to XML attacks. Use defused.xmlrpc.monkey_patch() function to monkey-patch xmlrpclib and mitigate XML vulnerabilities." - level: HIGH - -hardcoded_tmp_directory: - tmp_dirs: ['/tmp', '/var/tmp', '/dev/shm'] - -hardcoded_password: - # Support for full path, relative path and special "%(site_data_dir)s" - # substitution (/usr/{local}/share) - word_list: "%(site_data_dir)s/wordlist/default-passwords" - -ssl_with_bad_version: - bad_protocol_versions: - - 'PROTOCOL_SSLv2' - - 'SSLv2_METHOD' - - 'SSLv23_METHOD' - - 'PROTOCOL_SSLv3' # strict option - - 'PROTOCOL_TLSv1' # strict option - - 'SSLv3_METHOD' # strict option - - 'TLSv1_METHOD' # strict option - -password_config_option_not_marked_secret: - function_names: - - oslo.config.cfg.StrOpt - - oslo_config.cfg.StrOpt - -execute_with_run_as_root_equals_true: - function_names: - - ceilometer.utils.execute - - cinder.utils.execute - - neutron.agent.linux.utils.execute - - nova.utils.execute - - nova.utils.trycmd - -try_except_pass: - check_typed_exception: True diff --git a/devstack/README.rst b/devstack/README.rst deleted file mode 100644 index 3c1e05e..0000000 --- a/devstack/README.rst +++ /dev/null @@ -1,24 +0,0 @@ -==================== -Enabling in Devstack -==================== - -1. Download DevStack:: - - git clone https://github.com/openstack-dev/devstack.git - cd devstack - -2. Add this repo as an external repository:: - - > cat local.conf - [[local|localrc]] - enable_plugin glare https://github.com/openstack/glare - - .. note:: - To enable installation of the glare client from the git repo instead of PyPI, execute - a shell command: - - .. code-block:: bash - - export LIBS_FROM_GIT+=python-glareclient - -3. Run ``stack.sh`` diff --git a/devstack/plugin.sh b/devstack/plugin.sh deleted file mode 100644 index d0841a8..0000000 --- a/devstack/plugin.sh +++ /dev/null @@ -1,218 +0,0 @@ -#!/usr/bin/env bash -# Plugin file for Glare services -# ------------------------------- - -# Dependencies: -# ``functions`` file -# ``DEST``, ``DATA_DIR``, ``STACK_USER`` must be defined - -# Save trace setting -XTRACE=$(set +o | grep xtrace) -set -o xtrace - -echo_summary "glare's plugin.sh was called..." -# create_glare_accounts() - Set up common required glare accounts -# -# Tenant User Roles -# ------------------------------ -# service glare admin -function create_glare_accounts() { - create_service_user "glare" - - # required for swift access - if is_service_enabled s-proxy; then - create_service_user "glare-swift" "ResellerAdmin" - fi - - get_or_create_service "glare" "artifact" "Artifact repository" - get_or_create_endpoint "artifact" \ - "$REGION_NAME" \ - "$GLARE_SERVICE_PROTOCOL://$GLARE_SERVICE_HOST:$GLARE_SERVICE_PORT" \ - "$GLARE_SERVICE_PROTOCOL://$GLARE_SERVICE_HOST:$GLARE_SERVICE_PORT" \ - "$GLARE_SERVICE_PROTOCOL://$GLARE_SERVICE_HOST:$GLARE_SERVICE_PORT" -} - - -function mkdir_chown_stack { - if [[ ! -d "$1" ]]; then - sudo mkdir -p "$1" - fi - sudo chown $STACK_USER "$1" -} - - -function configure_glare { - - # create and clean up auth cache dir - mkdir_chown_stack "$GLARE_AUTH_CACHE_DIR" - rm -f "$GLARE_AUTH_CACHE_DIR"/* - - mkdir_chown_stack "$GLARE_CONF_DIR" - - # Generate Glare configuration file and configure common parameters.
- oslo-config-generator --config-file $GLARE_DIR/etc/oslo-config-generator/glare.conf --output-file $GLARE_CONF_FILE - - # Glare Configuration - #------------------------- - - iniset $GLARE_CONF_FILE DEFAULT debug $GLARE_DEBUG - - # Specify additional modules with external artifact types - if [ -n "$GLARE_CUSTOM_MODULES" ]; then - iniset $GLARE_CONF_FILE DEFAULT custom_artifact_types_modules $GLARE_CUSTOM_MODULES - fi - - # Specify a list of enabled artifact types - if [ -n "$GLARE_ENABLED_TYPES" ]; then - iniset $GLARE_CONF_FILE DEFAULT enabled_artifact_types $GLARE_ENABLED_TYPES - fi - - oslopolicy-sample-generator --namespace=glare --output-file=$GLARE_POLICY_FILE - sed -i 's/^#"//' $GLARE_POLICY_FILE - - cp -p $GLARE_DIR/etc/glare-paste.ini $GLARE_CONF_DIR - - iniset $GLARE_CONF_FILE paste_deploy flavor $GLARE_FLAVOR - - # Setup keystone_authtoken section - configure_auth_token_middleware $GLARE_CONF_FILE glare $GLARE_AUTH_CACHE_DIR - - # Setup RabbitMQ credentials - iniset $GLARE_CONF_FILE oslo_messaging_rabbit rabbit_userid $RABBIT_USERID - iniset $GLARE_CONF_FILE oslo_messaging_rabbit rabbit_password $RABBIT_PASSWORD - - # Enable notifications support - iniset $GLARE_CONF_FILE oslo_messaging_notifications driver messaging - - # Configure the database. - iniset $GLARE_CONF_FILE database connection `database_connection_url glare` - iniset $GLARE_CONF_FILE database max_overflow -1 - iniset $GLARE_CONF_FILE database max_pool_size 1000 - - # Path of policy.yaml file. - iniset $GLARE_CONF_FILE oslo_policy policy_file $GLARE_POLICY_FILE - - if [ "$LOG_COLOR" == "True" ] && [ "$SYSLOG" == "False" ]; then - setup_colorized_logging $GLARE_CONF_FILE DEFAULT tenant user - fi - - if [ "$GLARE_RPC_IMPLEMENTATION" ]; then - iniset $GLARE_CONF_FILE DEFAULT rpc_implementation $GLARE_RPC_IMPLEMENTATION - fi - - # Configuring storage - iniset $GLARE_CONF_FILE glance_store filesystem_store_datadir $GLARE_ARTIFACTS_DIR - - # Store the artifacts in swift if enabled. 
- if is_service_enabled s-proxy; then - GLARE_SWIFT_STORE_CONF=$GLARE_CONF_DIR/glare-swift-store.conf - cp -p $GLARE_DIR/etc/glare-swift.conf.sample $GLARE_CONF_DIR - - iniset $GLARE_CONF_FILE glance_store default_store swift - iniset $GLARE_CONF_FILE glance_store swift_store_create_container_on_put True - - iniset $GLARE_CONF_FILE glance_store swift_store_config_file $GLARE_SWIFT_STORE_CONF - iniset $GLARE_CONF_FILE glance_store default_swift_reference ref1 - iniset $GLARE_CONF_FILE glance_store stores "file, http, swift" - - iniset $GLARE_SWIFT_STORE_CONF ref1 user $SERVICE_PROJECT_NAME:glare-swift - - iniset $GLARE_SWIFT_STORE_CONF ref1 key $SERVICE_PASSWORD - iniset $GLARE_SWIFT_STORE_CONF ref1 auth_address $KEYSTONE_SERVICE_URI/v3 - iniset $GLARE_SWIFT_STORE_CONF ref1 user_domain_name $SERVICE_DOMAIN_NAME - iniset $GLARE_SWIFT_STORE_CONF ref1 project_domain_name $SERVICE_DOMAIN_NAME - iniset $GLARE_SWIFT_STORE_CONF ref1 auth_version 3 - - # commenting is not strictly necessary but it's confusing to have bad values in conf - inicomment $GLARE_CONF_FILE glance_store swift_store_user - inicomment $GLARE_CONF_FILE glance_store swift_store_key - inicomment $GLARE_CONF_FILE glance_store swift_store_auth_address - fi -} - - -# init_glare - Initialize the database -function init_glare { - # Delete existing artifacts - rm -rf $GLARE_ARTIFACTS_DIR - mkdir -p $GLARE_ARTIFACTS_DIR - - # (re)create Glare database - recreate_database glare utf8 - - # Migrate glare database - $GLARE_BIN_DIR/glare-db-manage --config-file $GLARE_CONF_FILE upgrade -} - - -# install_glare - Collect source and prepare -function install_glare { - setup_develop $GLARE_DIR -} - - -function install_glare_pythonclient { - if use_library_from_git "python-glareclient"; then - git_clone $GLARE_PYTHONCLIENT_REPO $GLARE_PYTHONCLIENT_DIR $GLARE_PYTHONCLIENT_BRANCH - setup_develop $GLARE_PYTHONCLIENT_DIR - else - # nothing actually "requires" glareclient, so force installation from pypi - pip_install_gr python-glareclient - fi -} - - -# start_glare - Start running processes, including screen -function start_glare { - run_process glare "$GLARE_BIN_DIR/glare-api --config-file $GLARE_CONF_DIR/glare.conf" -} - - -# stop_glare - Stop running processes -function stop_glare { - # Kill the Glare screen windows - for serv in glare-api; do - stop_process $serv - done -} - - -function cleanup_glare { - sudo rm -rf $GLARE_ARTIFACTS_DIR $GLARE_AUTH_CACHE_DIR -} - - -if is_service_enabled glare; then - if [[ "$1" == "stack" && "$2" == "install" ]]; then - echo_summary "Installing glare" - install_glare - install_glare_pythonclient - elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then - echo_summary "Configuring glare" - create_glare_accounts - configure_glare - elif [[ "$1" == "stack" && "$2" == "extra" ]]; then - echo_summary "Initializing glare" - init_glare - echo_summary "Starting Glare process" - start_glare - fi - - if [[ "$1" == "unstack" ]]; then - echo_summary "Shutting down glare" - stop_glare - fi - - if [[ "$1" == "clean" ]]; then - echo_summary "Cleaning glare" - cleanup_glare - fi -fi - - -# Restore xtrace -$XTRACE - -# Local variables: -# mode: shell-script -# End: diff --git a/devstack/settings b/devstack/settings deleted file mode 100644 index dcf1473..0000000 --- a/devstack/settings +++ /dev/null @@ -1,44 +0,0 @@ -# Devstack settings - -enable_service glare - -# Set up default directories -GLARE_PYTHONCLIENT_REPO=${GLARE_PYTHONCLIENT_REPO:-${GIT_BASE}/openstack/python-glareclient.git} 
-GLARE_PYTHONCLIENT_BRANCH=${GLARE_PYTHONCLIENT_BRANCH:-master} -GLARE_PYTHONCLIENT_DIR=$DEST/python-glareclient - -GLARE_DIR=$DEST/glare -GLARE_REPO=${GLARE_REPO:-${GIT_BASE}/openstack/glare.git} -GLARE_BRANCH=${GLARE_BRANCH:-master} - -# Glare virtual environment -if [[ ${USE_VENV} = True ]]; then - PROJECT_VENV["glare"]=${GLARE_DIR}.venv - GLARE_BIN_DIR=${PROJECT_VENV["glare"]}/bin -else - GLARE_BIN_DIR=$(get_python_exec_prefix) -fi - -GLARE_ARTIFACTS_DIR=${GLARE_ARTIFACTS_DIR:=$DATA_DIR/glare/artifacts} -GLARE_AUTH_CACHE_DIR=${GLARE_AUTH_CACHE_DIR:-/var/cache/glare} - -GLARE_CONF_DIR=${GLARE_CONF_DIR:-/etc/glare} -GLARE_CONF_FILE=$GLARE_CONF_DIR/glare.conf -GLARE_PASTE_INI=$GLARE_CONF_DIR/glare-paste.ini -GLARE_POLICY_FILE=$GLARE_CONF_DIR/policy.yaml -GLARE_SWIFT_STORE_CONF=$GLARE_CONF_DIR/glare-swift-store.conf - -if is_ssl_enabled_service "glare" || is_service_enabled tls-proxy; then - GLARE_SERVICE_PROTOCOL="https" -fi - -# Glare connection info. Note the port must be specified. -GLARE_SERVICE_PORT=${GLARE_SERVICE_PORT:-9494} -GLARE_SERVICE_HOST=${GLARE_SERVICE_HOST:-$SERVICE_HOST} -GLARE_SERVICE_PROTOCOL=${GLARE_SERVICE_PROTOCOL:-$SERVICE_PROTOCOL} - -GLARE_DEBUG=${GLARE_DEBUG:-True} - -GLARE_ADMIN_USER=${GLARE_ADMIN_USER:-glare} - -GLARE_FLAVOR=${GLARE_FLAVOR:-keystone} diff --git a/doc/source/_static/.placeholder b/doc/source/_static/.placeholder deleted file mode 100644 index e69de29..0000000 diff --git a/doc/source/_templates/sidebarlinks.html b/doc/source/_templates/sidebarlinks.html deleted file mode 100644 index 6ecd4dd..0000000 --- a/doc/source/_templates/sidebarlinks.html +++ /dev/null @@ -1,11 +0,0 @@ -

-Useful Links
- - -{% if READTHEDOCS %} - -{% endif %} diff --git a/doc/source/_theme/layout.css b/doc/source/_theme/layout.css deleted file mode 100644 index 4de226a..0000000 --- a/doc/source/_theme/layout.css +++ /dev/null @@ -1,4 +0,0 @@ -{% extends "basic/layout.html" %} -{% set css_files = css_files + ['_static/tweaks.css'] %} - -{% block relbar1 %}{% endblock relbar1 %} diff --git a/doc/source/_theme/theme.conf b/doc/source/_theme/theme.conf deleted file mode 100644 index 99ba97b..0000000 --- a/doc/source/_theme/theme.conf +++ /dev/null @@ -1,4 +0,0 @@ -[theme] -inherit = nature -stylesheet = nature.css -pygments_style = tango diff --git a/doc/source/architecture.rst b/doc/source/architecture.rst deleted file mode 100644 index 15738b6..0000000 --- a/doc/source/architecture.rst +++ /dev/null @@ -1,93 +0,0 @@ -.. - Copyright 2017 - Nokia Networks - All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -================== -Basic architecture -================== - -OpenStack Glare has a client-server architecture that provides a Unified REST API, -which then transfers control to the appropriate artifact type. The API consists of - * *Router*, that converts WSGI requests into appropriate Glare API methods; - * *Deserializer*, that parses parameters from user input and performs initial - validation checks; - * *Controller*, which is responsible for interactions with Glare Engine; - * *Serializer*, that prepares information for responses (inserts status code, - content-type, response content length, and so on). - -But before the requests reach the API they have to pass through the set of -middlewares, each of which performs some actions on the Request or Response objects. -For example, the *Auth* middleware checks that the authentication token provided in the request -header is valid by sending auth requests to the Identity service, obtains user -information and injects it into the Request object as a context object; -the *Fault middleware*, on the other hand, is responsible for converting inner -Glare exceptions to appropriate HTTP error codes. - -Almost all business logic is provided by the Glare *Engine*. It is responsible -for *Policy* checking, which lets the operator define what operations users may execute, -based on their contexts; for sending broadcast *Notifications* about performed -actions; for *Access Control*, where the Engine checks whether a user has the right to -modify the desired artifact; and finally for *Locking*, which is used to prevent race -conditions during artifact updates, when the artifact is locked until the -modification operation is finished. - -All the file (Blob data) operations are performed using the -*glance_store* library, which is responsible for interaction with external -storage back ends and/or local filesystems. The glance_store library -provides a uniform interface to access the backend stores. There is also -an adapter layer, *Store Manager*, between the Engine and glance_store, which is -responsible for converting glance_store exceptions and adding some additional -logic, such as sha256 calculation.
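The declarative model described in the next few paragraphs can be sketched in code. The sketch below is hypothetical: it reuses the ``wrappers.Field.init`` helper shown in the developer guide later in this patch, takes the ``MinLen`` and ``ForbiddenChars`` validator names from the prose that follows (the classes live in ``glare/objects/meta/validators.py``), and assumes validators are attached through a ``validators`` keyword argument.

.. code-block:: python

    from oslo_versionedobjects import fields

    from glare.objects import base
    from glare.objects.meta import validators
    from glare.objects.meta import wrappers

    Field = wrappers.Field.init

    class ExampleTemplate(base.BaseArtifact):
        @classmethod
        def get_type_name(cls):
            return "example_templates"

        fields = {
            # a non-empty string field with no slashes or commas;
            # validator names follow the prose below and are assumptions
            'template_name': Field(fields.StringField,
                                   validators=[validators.MinLen(1),
                                               validators.ForbiddenChars("/", ",")]),
        }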
-All database operations are organized around artifact types. Each type installed -in the system must implement the Glare Artifact Type Interface (GATI) and use -appropriate data types to describe its attributes. - -Glare uses several data types from the declarative framework *oslo.versionedobjects* -(Integer, Float, String, Boolean), which are complemented with the following home-grown -data types: - * Version — specifies the version of the artifact in ‘SemVer’ format and - implements comparison operations. - * Dependency — sets a reference to another artifact. When the - ‘dependency’ field is requested, Glare will fetch the dependent artifact's meta-information. - * Blob — specifies a binary object. When a user assigns a value to this field, - data will be automatically redirected to one of the connected storages. - * List and Dict — define complex data structures such as Lists and Dictionaries - of primitive types respectively. - -The *Base artifact type* is an abstract class that has a reference implementation -of GATI. It contains only common fields, like "id", "name", "version", -"created_at", "owner", and so on. - -Each artifact type is inherited from the Base and adds some additional fields. -For example, the Image artifact type adds "container_format" and -"disk_format" string fields, and the Heat Template type adds a "nested_templates" Blob -Dictionary. - -*Validators* are objects that can be attached to a field to perform additional -checks. For example, if the validator MinLen(1) is attached to a string field, it -checks that the string value is non-empty. The validator ForbiddenChars("/", ",") -checks that there are no slashes or commas in the string. - -Glare uses a central *Database* that is shared amongst all -the components in the system and is SQL-based by default. Other types -of database backends are somewhat supported and used by operators -but are not extensively tested upstream. - -.. figure:: ./images/glare-architecture.png - :figwidth: 100% - :align: center - -.. centered:: Image 1. OpenStack Glare Architecture diff --git a/doc/source/conf.py b/doc/source/conf.py deleted file mode 100644 index 2b0d6f7..0000000 --- a/doc/source/conf.py +++ /dev/null @@ -1,257 +0,0 @@ -# Copyright (c) 2010 OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# -# Glare documentation build configuration file, created by -# sphinx-quickstart on Tue May 18 13:50:15 2010. -# -# This file is execfile()'d with the current directory set to its containing -# dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import os -import subprocess -import sys -import warnings - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path = [ - os.path.abspath('../..'), - os.path.abspath('../../bin') - ] + sys.path - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.coverage', - 'sphinx.ext.ifconfig', - 'sphinx.ext.graphviz', - 'oslosphinx', - 'stevedore.sphinxext', - 'oslo_config.sphinxext', - 'sphinx.ext.autodoc', - 'sphinx.ext.viewcode', - 'oslo_config.sphinxconfiggen', - ] - -config_generator_config_file = [ - ('../../etc/oslo-config-generator/glare.conf', - '_static/glare'), -] - -# Add any paths that contain templates here, relative to this directory. -# templates_path = [] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'Glare' -copyright = u'2016-present, OpenStack Foundation.' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -from glare.version import version_info as glare_version -# The full version, including alpha/beta/rc tags. -release = glare_version.version_string_with_vcs() -# The short X.Y version. -version = glare_version.canonical_version_string() - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of documents that shouldn't be included in the build. -#unused_docs = [] - -# List of directories, relative to source directory, that shouldn't be searched -# for source files. -#exclude_trees = ['api'] -exclude_patterns = [ - # The man directory includes some snippet files that are included - # in other documents during the build but that should not be - # included in the toctree themselves, so tell Sphinx to ignore - # them when scanning for input files. - 'man/footer.rst', - 'man/general_options.rst', - 'man/openstack_options.rst', -] - -# The reST default role (for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -show_authors = True - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -modindex_common_prefix = ['glare.'] - -# -- Options for man page output -------------------------------------------- - -# Grouping the document tree for man pages. -# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual' - -man_pages = [] - - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. Major themes that come with -# Sphinx are currently 'default' and 'sphinxdoc'. 
-# html_theme_path = ["."] -# html_theme = '_theme' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = ['_theme'] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -html_title = 'Glare' - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' -git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local", - "-n1"] -try: - html_last_updated_fmt = subprocess.check_output(git_cmd).decode('utf-8') -except Exception: - warnings.warn('Cannot get last updated time from git repository. ' - 'Not setting "html_last_updated_fmt".') - - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -html_use_modindex = True - -# If false, no index is generated. -html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = '' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'glareedoc' - - -# -- Options for LaTeX output ------------------------------------------------ - -# The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, -# documentclass [howto/manual]). -#latex_documents = [ -# ('index', 'Glare.tex', u'Glare Documentation', -# u'Glare Team', 'manual'), -#] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# Additional stuff for the LaTeX preamble. 
-#latex_preamble = ''
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_use_modindex = True
diff --git a/doc/source/developer/creating_custom_artifact_type.rst b/doc/source/developer/creating_custom_artifact_type.rst
deleted file mode 100644
index 4fd2a3f..0000000
--- a/doc/source/developer/creating_custom_artifact_type.rst
+++ /dev/null
@@ -1,174 +0,0 @@
-How to create a new Artifact Type
-=================================
-
-Basics
-------
-
-Each artifact type must implement the **Glare Artifact Type Interface** (GATI)
-and inherit from the ``glare.objects.base.BaseArtifact`` class.
-GATI requires only one class method to be defined – ``get_type_name`` –
-which returns a string with a unique artifact type name. All other methods
-and fields are optional.
-
-.. note::
-
-   By convention, it is recommended to give type names in the plural, in
-   lowercase, with words separated by underscores.
-
-Example code for a minimal artifact type:
-
-  .. code-block:: python
-
-    from glare.objects import base

-    class HelloWorld(base.BaseArtifact):
-        @classmethod
-        def get_type_name(cls):
-            return "hello_worlds"
-
-Custom artifact fields
-----------------------
-
-Users can add type-specific fields to their artifact type to extend
-its logic and functionality. Following the requirements of the
-oslo.versionedobjects library, all new fields must be placed in a class
-dictionary attribute called ``fields``:
-
-  .. code-block:: python
-
-    from glare.objects import base
-
-    class HelloWorld(base.BaseArtifact):
-        ...
-        fields = {...}
-
-There is a large number of possible field options. Let's look at the
-most popular ones.
-
-Fields of primitive types
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Users are allowed to create additional fields of 5 primitive types:
- * IntegerField
- * FloatField
- * FlexibleBooleanField
- * StringField
- * Link
-
-The first four are taken from oslo.versionedobjects directly; Link is a
-Glare-specific field which stores links to other artifacts in the system
-in a specific format.
-
-.. note::
-
-   It is recommended to use the FlexibleBoolean field instead of plain
-   Boolean, because it has more sophisticated coercion. For instance,
-   it accepts string parameters like "true", "yes", "1" and so on,
-   and successfully coerces them to the boolean value True.
-
-Users can create their own fields with the ``init`` method of the Attribute
-class. This method's first parameter must be an appropriate field class;
-other parameters are optional and will be discussed later. In the next
-example we create five new custom fields, one for each primitive type:
-
-  .. code-block:: python
-
-    from oslo_versionedobjects import fields
-
-    from glare.objects import base
-    from glare.objects.meta import wrappers
-    from glare.objects.meta import fields as glare_fields
-
-    Field = wrappers.Field.init
-
-    class HelloWorld(base.BaseArtifact):
-        @classmethod
-        def get_type_name(cls):
-            return "hello_worlds"
-
-        fields = {
-            'my_int': Field(fields.IntegerField),
-            'my_float': Field(fields.FloatField),
-            'my_bool': Field(fields.FlexibleBooleanField),
-            'my_string': Field(fields.StringField),
-            'my_link': Field(glare_fields.Link)
-        }
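Beyond the bare field class, ``init`` accepts optional parameters that tune a
field's behavior. As a forward pointer to the field properties listed in the
REST API specification elsewhere in this patch (``required_on_activate``,
``mutable``, ``sortable``, ``default`` and so on), here is a hypothetical
sketch; treating those property names as ``init`` keyword arguments is an
assumption, not verified API.

.. code-block:: python

    from oslo_versionedobjects import fields

    from glare.objects import base
    from glare.objects.meta import wrappers

    Field = wrappers.Field.init


    class HelloWorld(base.BaseArtifact):
        @classmethod
        def get_type_name(cls):
            return "hello_worlds"

        fields = {
            # A string that may be changed even after activation, can be
            # used as a sort key, and need not be set before activation.
            'release_notes': Field(fields.StringField,
                                   mutable=True,
                                   sortable=True,
                                   required_on_activate=False,
                                   default=""),
        }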
-
-Compound types
-^^^^^^^^^^^^^^
-
-There are two collection types that may contain fields of primitive types:
-*List* and *Dict*. Fields of compound types are created with the ``init``
-method of the ListAttribute and DictAttribute classes respectively.
-Unlike the Attribute class's ``init``, this method takes a field *type*
-class as its first parameter, rather than a plain field class. So,
-*IntegerField* must be changed to *Integer*, *FloatField* to *Float*, and
-so on. Finally, for a collection of links, *LinkType* should be used.
-Let's add several new compound fields to the *HelloWorld* class.
-
-  .. code-block:: python
-
-    from oslo_versionedobjects import fields
-
-    from glare.objects import base
-    from glare.objects.meta import wrappers
-    from glare.objects.meta import fields as glare_fields
-
-    Field = wrappers.Field.init
-    Dict = wrappers.DictField.init
-    List = wrappers.ListField.init
-
-    class HelloWorld(base.BaseArtifact):
-        @classmethod
-        def get_type_name(cls):
-            return "hello_worlds"
-
-        fields = {
-            ...
-            'my_list_of_str': List(fields.String),
-            'my_dict_of_int': Dict(fields.Integer),
-            'my_list_of_float': List(fields.Float),
-            'my_dict_of_bools': Dict(fields.FlexibleBoolean),
-            'my_list_of_links': List(glare_fields.LinkType)
-        }
-
-Other constraints, like maximum collection size, allowed item values,
-and so on, can also be specified with additional parameters to the ``init``
-method. They will be discussed later.
-
-Blob and Folder types
-^^^^^^^^^^^^^^^^^^^^^
-
-The most interesting fields in the Glare framework are *Blob* and
-*Folder* (or *BlobDict*). These fields allow users to work with binary data,
-which is stored in a standalone cloud storage, like Swift or Ceph.
-The difference between Blob and Folder is that a Blob exposes a single
-endpoint and may contain only one binary object, while a Folder may
-contain many binaries with user-specified names.
-
-Example of Blob and Folder fields:
-
-  .. code-block:: python
-
-    from oslo_versionedobjects import fields
-
-    from glare.objects import base
-    from glare.objects.meta import wrappers
-    from glare.objects.meta import fields as glare_fields
-
-    Field = wrappers.Field.init
-    Dict = wrappers.DictField.init
-    List = wrappers.ListField.init
-    Blob = wrappers.BlobField.init
-    Folder = wrappers.FolderField.init
-
-    class HelloWorld(base.BaseArtifact):
-        @classmethod
-        def get_type_name(cls):
-            return "hello_worlds"
-
-        fields = {
-            ...
-            'my_blob': Blob(),
-            'my_folder': Folder(),
-        }
diff --git a/doc/source/developer/devstack.rst b/doc/source/developer/devstack.rst
deleted file mode 100644
index a5f19fe..0000000
--- a/doc/source/developer/devstack.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Glare Devstack Installation
-===========================
-
-TBD
\ No newline at end of file
diff --git a/doc/source/developer/index.rst b/doc/source/developer/index.rst
deleted file mode 100644
index ea9a394..0000000
--- a/doc/source/developer/index.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-Developer's Reference
-=====================
-
-.. toctree::
-   :maxdepth: 3
-
-   webapi/index
-   creating_custom_artifact_type
-   devstack
-   troubleshooting
diff --git a/doc/source/developer/troubleshooting.rst b/doc/source/developer/troubleshooting.rst
deleted file mode 100644
index a0670d5..0000000
--- a/doc/source/developer/troubleshooting.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Troubleshooting And Debugging
-=============================
-
-TBD
diff --git a/doc/source/developer/webapi/index.rst b/doc/source/developer/webapi/index.rst
deleted file mode 100644
index 55962b8..0000000
--- a/doc/source/developer/webapi/index.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-REST API Specification
-======================
-
-.. toctree::
-   :maxdepth: 2
-
-   v1
diff --git a/doc/source/developer/webapi/v1.rst b/doc/source/developer/webapi/v1.rst
deleted file mode 100644
index 6fe233a..0000000
--- a/doc/source/developer/webapi/v1.rst
+++ /dev/null
@@ -1,1331 +0,0 @@
-V1 API
-======
-
-This API describes the different ways of interacting with the Glare service
-over HTTP, following the Representational State Transfer (REST) architectural
-style.
-
-**Glossary**
-
-* *Glare* (from GLare Artifact REpository) - a service that provides access
-  to a unified catalog of immutable objects with structured meta-information as
-  well as related binary data (these structures are also called *'artifacts'*).
-  Glare controls artifact consistency and guarantees that binary data and
-  fields won't change during the artifact's lifetime.
-
-  .. note::
-
-     An artifact type developer can declare fields whose values may be
-     changed, but this has to be done explicitly, because by default all
-     fields are considered immutable.
-
-* *Artifact* - in terms of Glare, an Artifact is a structured immutable object
-  with some fields, related binary data, and metadata.
-
-* *Artifact Version* - field of an artifact that defines its version in SemVer
-  format.
-
-* *Artifact type* - defines the structure of an artifact: both its binary data
-  and its fields. Examples of OpenStack artifact types that will be supported
-  in Glare are: Heat templates, Murano Packages, Nova Images, Tacker VNFs and
-  so on. All artifact types inherit from the abstract Base type and extend it
-  with new fields. The Base type itself inherits from the Base class of the
-  oslo_versionedobjects library (oslo_vo).
-
-* *Artifact status* - specifies the state of the artifact and the possible
-  actions that can be done with it. List of possible artifact statuses:
-
-  * *drafted* - Artifact is created but not activated, so it can be changed
-    by the artifact owner or an administrator.
-
-  * *active* - Artifact is activated and marked as ready for usage.
-    From this point on, only mutable fields can be changed.
-
-  * *deactivated* - Artifact is not available to users other than
-    administrators. Used when a cloud administrator needs to inspect
-    the artifact.
-
-  * *deleted* - Artifact is deleted.
-
-.. list-table:: **Artifact status transition table**
-   :header-rows: 1
-
-   * - Artifact Status
-     - drafted
-     - active
-     - deactivated
-     - deleted
-
-   * - **drafted**
-     - X
-     - activate Artifact
-     - N/A
-     - delete Artifact
-
-   * - **active**
-     - N/A
-     - X
-     - deactivate Artifact
-     - delete Artifact
-
-   * - **deactivated**
-     - N/A
-     - reactivate Artifact
-     - X
-     - delete Artifact
-
-   * - **deleted**
-     - N/A
-     - N/A
-     - N/A
-     - X
-
-
-* *Artifact Field* - field of an artifact that defines some information
-  about the artifact. Artifact fields always have a name, a type, a value and
-  several additional properties, described below.
-
-  Glare uses several primitive types from oslo.versionedobjects directly:
-
-  * *String*;
-
-  * *Integer*;
-
-  * *Float*;
-
-  * *Boolean*;
-
-  Glare also expands this list with custom types:
-
-  * *Blob*;
-
-  * *Link*;
-
-  * structured generic types *Dict* and *List*.
-
-  Each field has additional properties:
-
-  * **required_on_activate** - boolean value indicating if the field value
-    should be specified for the artifact before activation. (Default: True)
-
-  * **mutable** - boolean value indicating if the field value may be changed
-    after the artifact is activated. (Default: False)
-
-  * **system** - boolean value indicating if the field value cannot be edited
-    by a user. (Default: False)
-
-  * **sortable** - boolean value indicating if it is possible to sort by
-    this field's values. (Default: False)
-
-    .. note::
-
-       Only fields of the four primitive types may be sortable: integer,
-       string, float and boolean.
-
-  * **nullable** - boolean value indicating if the field's value can be empty.
-    (Default: True)
-
-  * **default** - a default value for the field may be specified by the
-    artifact type. (Default: None)
-
-  * **validators** - a list of objects. When a user sets a value on a field
-    with additional validators, Glare applies them before setting the value
-    and raises `ValueError` if at least one of the validator requirements is
-    not satisfied.
-
-  * **filter_ops** - a list of available filter operators for the field. There
-    are seven available operators: 'eq', 'neq', 'lt', 'lte', 'gt', 'gte', 'in'.
-
-* *Artifact Link* - field type that defines a soft dependency of one artifact
-  on another. It is a URL that allows the user to obtain some artifact data.
-  For external links the format is the following:
-  *http(s)://<netloc>/<path>*
-  For internal links the value contains only *<path>*.
-  Example of *<path>*:
-  ``/artifacts/<artifact_type>/<artifact_id>``
-
-* *Artifact Blob* - field type that defines binary data for an artifact.
-  Users can download the artifact blob from Glare. Each blob field has a flag
-  *external*, which indicates whether the field was created during a file
-  upload (False) or by a direct user request (True). In other words,
-  "external" means that the blob field URL is just a reference to some
-  external file and Glare does not manage the blob operations in that case.
-  The JSON schema that defines the blob format:
-
-  .. code-block:: javascript
-
-    {
-        "type": "object",
-        "properties": {
-            "url": {"type": ["string", "null"], "format": "uri",
-                    "maxLength": 2048},
-            "size": {"type": ["number", "null"]},
-            "md5": {"type": ["string", "null"]},
-            "sha1": {"type": ["string", "null"]},
-            "sha256": {"type": ["string", "null"]},
-            "external": {"type": "boolean"},
-            "id": {"type": "string"},
-            "status": {"type": "string",
-                       "enum": ["saving", "active"]},
-            "content_type": {"type": ["string", "null"]},
-        },
-        "required": ["url", "size", "md5", "sha1", "sha256", "external",
-                     "status", "id", "content_type"]
-    }
-
-  Artifact blob fields may have the following statuses:
-
-  * *saving* - the blob record has been created in the database and the
-    blob upload has started.
-
-  * *active* - the blob upload finished successfully.
-
-.. list-table:: **Blob status transition table**
-   :header-rows: 1
-
-   * - Blob Status
-     - saving
-     - active
-
-   * - **saving**
-     - X
-     - finish blob upload
-
-   * - **active**
-     - N/A
-     - X
-
-* *Artifact Dict and List* - compound generic field types that
-  implement Dict or List interfaces respectively, and contain values of some
-  primitive type, defined by the `element_type` attribute.
-
-* *Artifact visibility* - defines who may have access to an artifact.
-  Initially there are two options:
-
-  * `private` - the artifact is accessible only by its owner and an admin.
-    When an artifact is 'drafted' its visibility is always `private`.
-
-  * `public` - all users have access to the artifact by default.
-
-  It is allowed to change visibility only when an artifact has `active`
-  status.
-
-* *Artifact immutability* - while an artifact is *drafted* all its fields
-  are editable, but when it becomes *active* it is "immutable" and cannot be
-  modified (except for those fields explicitly declared as `mutable`).
-
-* *Base type json-schema*:
-
-  ..
code-block:: javascript - - { - "name": "Base artifact type", - "properties": { - "activated_at": { - "description": "Datetime when artifact has became active.", - "filter_ops": ["eq", - "neq", - "in", - "gt", - "gte", - "lt", - "lte" - ], - "format": "date-time", - "glareType": "DateTime", - "readOnly": true, - "required_on_activate": false, - "sortable": true, - "type": ["string", - "null" - ] - }, - "created_at": { - "description": "Datetime when artifact has been created.", - "filter_ops": ["eq", - "neq", - "in", - "gt", - "gte", - "lt", - "lte" - ], - "format": "date-time", - "glareType": "DateTime", - "readOnly": true, - "sortable": true, - "type": "string" - }, - "description": { - "default": "", - "description": "Artifact description.", - "filter_ops": ["eq", - "neq", - "in" - ], - "glareType": "String", - "maxLength": 4096, - "mutable": true, - "required_on_activate": false, - "type": ["string", - "null" - ] - }, - "id": { - "description": "Artifact UUID.", - "filter_ops": ["eq", - "neq", - "in" - ], - "glareType": "String", - "maxLength": 255, - "pattern": "^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$", - "readOnly": true, - "sortable": true, - "type": "string" - }, - "metadata": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "description": "Key-value dict with useful information about an artifact.", - "filter_ops": ["eq", - "neq" - ], - "glareType": "StringDict", - "maxProperties": 255, - "required_on_activate": false, - "type": ["object", - "null" - ] - }, - "name": { - "description": "Artifact Name.", - "filter_ops": ["eq", - "neq", - "in" - ], - "glareType": "String", - "maxLength": 255, - "required_on_activate": false, - "sortable": true, - "type": "string" - }, - "owner": { - "description": "ID of user/tenant who uploaded artifact.", - "filter_ops": ["eq", - "neq", - "in" - ], - "glareType": "String", - "maxLength": 255, - "readOnly": true, - "required_on_activate": false, - "sortable": true, - "type": "string" - }, - "status": { - "default": "drafted", - "description": "Artifact status.", - "enum": ["drafted", - "active", - "deactivated", - "deleted" - ], - "filter_ops": ["eq", - "neq", - "in" - ], - "glareType": "String", - "sortable": true, - "type": "string" - }, - "tags": { - "default": [], - "description": "List of tags added to Artifact.", - "filter_ops": ["eq", - "neq", - "in" - ], - "glareType": "StringList", - "items": { - "type": "string" - }, - "maxItems": 255, - "mutable": true, - "required_on_activate": false, - "type": ["array", - "null" - ] - }, - "updated_at": { - "description": "Datetime when artifact has been updated last time.", - "filter_ops": ["eq", - "neq", - "in", - "gt", - "gte", - "lt", - "lte" - ], - "format": "date-time", - "glareType": "DateTime", - "readOnly": true, - "sortable": true, - "type": "string" - }, - "version": { - "default": "0.0.0", - "description": "Artifact version(semver).", - "filter_ops": ["eq", - "neq", - "in", - "gt", - "gte", - "lt", - "lte" - ], - "glareType": "String", - "pattern": "/^([0-9]+)\\.([0-9]+)\\.([0-9]+)(?:-([0-9A-Za-z-]+(?:\\.[0-9A-Za-z-]+)*))?(?:\\+[0-9A-Za-z-]+)?$/", - "required_on_activate": false, - "sortable": true, - "type": "string" - }, - "visibility": { - "default": "private", - "description": "Artifact visibility that defines if artifact can be available to other users.", - "filter_ops": ["eq"], - "glareType": "String", - "maxLength": 255, - "sortable": true, - "type": "string" - } - }, - "required": ["name"], - "type": "object" - } 
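To show how a client might consume this schema, here is a minimal sketch that
fetches it through the schema endpoints described in the next section and
validates live artifact records against it. The endpoint, the ``images`` type
name, and the use of the third-party ``requests`` and ``jsonschema`` packages
are assumptions for illustration; authentication headers are omitted.

.. code-block:: python

    import jsonschema
    import requests

    GLARE = "http://localhost:9494"  # assumed endpoint

    # Fetch the JSON-schema of one enabled artifact type.
    schema = requests.get(GLARE + "/schemas/images").json()

    # List artifacts of that type; the response key mirrors the type name.
    listing = requests.get(GLARE + "/artifacts/images").json()
    for artifact in listing["images"]:
        # Raises jsonschema.ValidationError if a record does not conform.
        jsonschema.validate(artifact, schema)
        print(artifact["id"], "conforms to the published schema")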
-
-Basics
-------
-
-The Glare API complies with the OpenStack API-WG guidelines:
-
-  * `Filtering, sorting and pagination
-    `_
-
-  * `Errors
-    `_
-
-For updating artifact field values, the Glare API uses `json-patch
-`_.
-
-Glare supports microversions so that clients can specify which API version
-to use: `API-WG microversion guidelines `_.
-
-Artifact types are described with `json-schema `_.
-
-Media types
-^^^^^^^^^^^
-
-Currently this API relies on JSON to represent the states of REST resources.
-
-Error states
-^^^^^^^^^^^^
-
-The common HTTP Response Status Codes
-(https://github.com/for-GET/know-your-http-well/blob/master/status-codes.md)
-are used.
-
-Application root [/]
-^^^^^^^^^^^^^^^^^^^^
-Application Root provides links to all possible API versions for Glare. URLs
-for the other resources described below are relative to the Application Root.
-
-API schemas root [/schemas/]
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-All the API URLs in this section operate on the schemas of artifact types.
-
-* **List of enabled artifact type schemas**
-  * **GET /schemas** - get the JSON-schemas of all enabled artifact types
-
-    * HTTP Responses:
-      * 200
-
-    * Response schema: JSON dictionary mapping each *<artifact_type>* name
-      to its JSON-schema
-
-* **Get artifact type schema**
-  * **GET /schemas/{artifact_type}** - get the JSON-schema of artifact type
-    `artifact_type`
-
-    * HTTP Responses:
-      * 200 if `artifact_type` is enabled
-      * 404 if no artifact type is defined to handle the specified value of
-        `artifact_type`
-
-    * Response schema: JSON-schema for the requested type
-
-
-API artifacts root [/artifacts/]
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-All the API URLs in this section operate on artifacts.
-
-All the APIs which are specific to a particular artifact type are
-placed under `/artifacts/{artifact_type}`, where `artifact_type` is a constant
-defined by the artifact type definition (i.e. by the related oslo_vo class).
-For example, for artifacts of type "images" the API endpoints start
-with `/artifacts/images`.
-
-The `artifact_type` constant should unambiguously identify the
-artifact type, so the values of this constant should be unique among all the
-enabled artifact types.
-
-
-* **List artifacts**
-  * **GET /artifacts/{artifact_type}** - list artifacts of the given type
-
-    Returns the list of artifacts having the specified type, scoped
-    to the current tenant. If the user is an ``administrator``, it
-    returns the artifacts owned by all tenants.
-
-  * **GET /artifacts/all** - list artifacts regardless of their type
-
-    Returns the list of artifacts of all types for the given tenant. Only
-    common fields are shown in the output; all type-specific fields
-    are skipped.
-
-  * URL parameters:
-    * `artifact_type` identifier of the artifact type; it should be equal to
-      a valid constant defined in one of the enabled oslo_vo classes.
-
-  * Query parameters:
-    The query may contain parameters intended for filtering and sorting by
-    most of the common and type-specific artifact fields.
-    The set of parameters and their values should be compliant with the
-    schema defined by the artifact type and its version.
-
-    **Filtering**:
-
-    * Filter keys may be any common or type-specific fields of
-      primitive type, like 'String', 'Float', 'Integer' and 'Boolean'. It is
-      also possible to filter artifacts by Dict keys and by Dict or List
-      values.
-
-      Direct comparison requires a field name to be specified as the query
-      parameter and the filtering value as its value, e.g. `?name=some_name`.
-      Parameter names and values are case-sensitive.
-
-    * The Artifact API supports filtering operations in the format
-      `?name=<op>:some_name`, where `<op>` is one of the following:
-
-      1. **eq**: equal;
-      2. **neq**: not equal;
-      3. **gt**: greater than;
-      4. **gte**: greater than or equal;
-      5. **lt**: less than;
-      6. **lte**: less than or equal;
-      7. **in**: in a list of.
-
-      The operator `eq` is the default and may be omitted, i.e. the filter
-      `<field>=eq:<value>` is equivalent to `<field>=<value>`.
-
-    * Set comparison requires a field name to be specified as the query
-      parameter. The parameter may be repeated several times, e.g. the query
-      `?name=qwerty&version=gt:1.0&version=lt:5.0` filters the artifacts
-      having name `qwerty` and versions between 1.0 and 5.0, exclusive.
-
-    * To filter artifacts by any of several values, the **in** operator
-      should be used with a comma-separated list of values. The query
-      `?name=in:abc,def,ghi` returns all artifacts with name `abc`, `def`
-      or `ghi`.
-
-    * Filtering by Dict values is performed in the format
-      `<dict_name>.<key_name>=[<op>:]<value>`. This filter returns only
-      those artifacts that have the key `<key_name>` in their Dict
-      `<dict_name>` whose value satisfies the right part of the filter.
-      Filtering by value is allowed for Dicts of primitive types only.
-
-    * Dicts can be filtered by their keys in the format
-      `<dict_name>=[<op>:]<value>`. Only `eq`, `neq` and `in` can be used
-      as filtering operators. For `eq`, it returns all artifacts that have
-      the key `<value>` in their Dict field `<dict_name>`; for `neq`, it
-      returns all artifacts that don't have that key in Dict `<dict_name>`;
-      for `in`, it returns artifacts with any of the keys in the
-      comma-separated list `<value>`.
-
-    * Filtering by List values may be performed in the same manner as by
-      Dict keys.
-
-    **Sorting**
-
-    In order to retrieve data in any sort order and direction, the artifacts
-    REST API accepts multiple sort keys and directions.
-
-    The Artifacts API aligns with the `API Working group sorting guidelines
-    `_ and supports the following parameter on
-    the request:
-
-    * sort: Comma-separated list of sort keys. Each key is optionally
-      appended with <:dir>, where 'dir' is the direction for the
-      corresponding sort key (supported values are 'asc' for ascending
-      and 'desc' for descending).
-
-    Sort keys may be any generic or type-specific metadata fields of
-    primitive type, like 'string', 'numeric', 'int' and 'bool', but sorting
-    by type-specific fields is allowed only when the artifact type is
-    provided.
-
-    The default sort direction is 'desc'; the default sort key is
-    'created_at'.
-
-    **Pagination**
-
-    The `limit` and `marker` query parameters may be used to paginate through
-    the artifacts collection in the same way as in the current version of
-    the Glance "List Images" API.
-
-    The maximum `limit` value is 1000. This cap exists for security reasons,
-    to prevent requests that would pull the entire database at once.
-
-  * HTTP Responses:
-    * 200 if `artifact_type` is enabled
-    * 400 if the query has incorrect filter or sort parameters
-    * 404 if no artifact type is defined to handle the specified value of
-      `artifact_type`
-
-  * Response schema:
-
-    .. code-block:: javascript
-
-        {
-            "<artifact_type>": [<list of artifact definitions>],
-            "first": "/artifacts/<artifact_type>",
-            "schema": "/schemas/<artifact_type>",
-            "next": "<url of the next page, if any>"
-        }
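To tie the filtering, sorting and pagination rules above together, here is a
minimal client-side sketch. The endpoint, the ``example_type`` name, and the
absence of authentication are assumptions for illustration.

.. code-block:: python

    import requests

    GLARE = "http://localhost:9494"  # assumed endpoint

    # name in {abc, def}, 1.0 <= version < 5.0, newest versions first,
    # 20 artifacts per page.
    params = {
        "name": "in:abc,def",
        "version": ["gte:1.0", "lt:5.0"],  # repeated key => set comparison
        "sort": "version:desc",
        "limit": 20,
    }

    url = GLARE + "/artifacts/example_type"
    while url:
        page = requests.get(url, params=params).json()
        for artifact in page["example_type"]:
            print(artifact["id"], artifact["name"], artifact["version"])
        # Follow the "next" link until the collection is exhausted; the
        # link already carries the query, so drop the explicit params.
        next_path = page.get("next")
        url = GLARE + next_path if next_path else None
        params = None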
-
-* **Create a new artifact**
-
-  * **POST /artifacts/{artifact_type}**
-
-  * Creates a new artifact record in the database. The status of the artifact
-    is set to `drafted`. The request body may contain initial metadata of the
-    artifact. It is mandatory to define at least the artifact `name` and
-    `version` in the request body.
-
-  * URL parameters:
-
-    * `artifact_type` identifier of the artifact type; it should be equal to
-      a valid constant defined in one of the enabled oslo_vo classes.
-
-  * HTTP Responses:
-
-    * 201 if everything went fine.
-
-    * 409 if an artifact of this type with the same name and version already
-      exists for the tenant.
-
-    * 400 if incorrect initial values were provided in the request body.
-
-    * 404 if no artifact type is defined to handle the specified value of
-      `artifact_type`.
-
-  * Request content-type: `application/json`
-
-  * Response content-type: `application/json`
-
-  * Response schema: JSON with the definition of the created artifact
-
-* **Get an artifact info**
-
-  * **GET /artifacts/{artifact_type}/{id}**
-
-  * Returns an artifact record with all the common and type-specific fields.
-
-  * URL parameters:
-
-    * `artifact_type` identifier of the artifact type; it should be equal to
-      a valid constant defined in one of the enabled oslo_vo classes.
-
-    * `id` identifier of the artifact.
-
-  * HTTP Responses:
-
-    * 200 if everything went fine.
-
-    * 404 if no artifact with the given ID was found.
-
-    * 404 if the type of the found artifact differs from the type specified
-      by the `artifact_type` parameter.
-
-  * Response content-type: `application/json`.
-
-  * Response body: JSON with the artifact definition.
-
-  * **GET /artifacts/all/{id}**
-
-  * Returns an artifact record with common fields only, regardless of its
-    type.
-
-  * URL parameters:
-
-    * `id` identifier of the artifact
-
-  * HTTP Responses:
-
-    * 200 if everything went fine
-
-    * 404 if no artifact with the given ID was found
-
-  * Response content-type: `application/json`
-
-  * Response schema: JSON with the artifact definition
-
-* **Update an Artifact**
-
-  * **PATCH /artifacts/{artifact_type}/{id}**
-
-  * Updates the artifact's fields using json-patch notation. If the artifact
-    has a status other than `drafted`, only mutable fields may be updated.
-
-  * URL parameters:
-
-    * `artifact_type` identifier of the artifact type; it should be equal to
-      a valid constant defined in one of the enabled oslo_vo classes.
-
-    * `id` identifier of the artifact.
-
-  * HTTP Responses:
-
-    * 200 if everything went fine.
-
-    * 404 if no artifact with the given ID was found.
-
-    * 404 if the type of the found artifact differs from the type specified
-      by the `artifact_type` parameter.
-
-    * 403 if the PATCH attempts to modify an immutable field while the
-      artifact's state is other than `drafted`.
-
-    * 400 if incorrect values were provided in the request body.
-
-    * 409 if an artifact with the updated name and version already exists
-      for the tenant.
-
-  * Request content-type: `application/json-patch+json`
-
-  * Response content-type: `application/json`
-
-  * Response body: JSON definition of the updated artifact
-
-* **Delete an Artifact**
-
-  * **DELETE /artifacts/{artifact_type}/{id}**
-
-  * Deletes the artifact's database record and all its binary data from the
-    store.
-
-  * URL parameters:
-
-    * `artifact_type` identifier of the artifact type; it should be equal to
-      a valid constant defined in one of the enabled oslo_vo classes.
-
-    * `id` identifier of the artifact
-
-  * HTTP Responses:
-
-    * 204 if everything went fine.
-
-    * 404 if no artifact with the given ID was found.
-
-    * 404 if the type of the found artifact differs from the type specified
-      by the `artifact_type` parameter.
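A short client-side sketch combining the json-patch update above with the blob
endpoints described next. The endpoint, type, field names, and payload file are
placeholders, and authentication is omitted.

.. code-block:: python

    import requests

    GLARE = "http://localhost:9494"  # assumed endpoint
    ART = GLARE + "/artifacts/example_type/art_id1"

    # Update a field with a json-patch document.
    patch = [{"op": "replace", "path": "/description", "value": "v1 build"}]
    requests.patch(ART, json=patch,
                   headers={"Content-Type": "application/json-patch+json"})

    # Upload binary data to the 'blob_file' blob field.
    with open("payload.bin", "rb") as src:
        requests.put(ART + "/blob_file", data=src,
                     headers={"Content-Type": "application/octet-stream"})

    # Stream the blob back to disk.
    with requests.get(ART + "/blob_file", stream=True) as resp, \
            open("payload_copy.bin", "wb") as dst:
        for chunk in resp.iter_content(chunk_size=65536):
            dst.write(chunk)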
-
-* **Upload a blob**
-
-  * **PUT /artifacts/{artifact_type}/{id}/{blob_name}[/{key_name}]**
-
-  * Uploads binary data to a blob field.
-
-  * URL parameters:
-
-    * `artifact_type` identifier of the artifact type; it should be equal to
-      a valid constant defined in one of the enabled oslo_vo classes.
-
-    * `id` identifier of the artifact.
-
-    * `blob_name` name of the blob field.
-
-    * optional: `key_name` name of a key if the user uploads data into a blob
-      dictionary.
-
-  * HTTP Responses:
-
-    * 200 if everything went fine.
-
-    * 404 if no artifact with the given ID was found.
-
-    * 404 if the type of the found artifact differs from the type specified
-      by the `artifact_type` parameter.
-
-    * 400 if the `blob_name` field doesn't exist in `artifact_type` or it is
-      not a blob field.
-
-    * 409 if the blob is already uploaded and has status `active`.
-
-    * 409 if the blob has status `saving`.
-
-    * 413 if the blob size exceeds the limit specified by the artifact type.
-
-  * Request content-type: any, except
-    `application/vnd+openstack.glare-custom-location+json`.
-
-  * Response content-type: `application/json`.
-
-  * Response body: JSON definition of the artifact.
-
-* **Download a blob**
-
-  * **GET /artifacts/{artifact_type}/{id}/{blob_name}[/{key_name}]**
-
-  * Downloads binary data from a blob field.
-
-  * URL parameters:
-
-    * `artifact_type` identifier of the artifact type; it should be equal to
-      a valid constant defined in one of the enabled oslo_vo classes.
-
-    * `id` identifier of the artifact.
-
-    * `blob_name` name of the blob field.
-
-    * optional: `key_name` name of a key if the user downloads data from a
-      blob dictionary.
-
-  * HTTP Responses:
-
-    * 200 if everything went fine.
-
-    * 301 if the blob has an `external` location.
-
-    * 404 if no artifact with the given ID was found.
-
-    * 404 if the type of the found artifact differs from the type specified
-      by the `artifact_type` parameter.
-
-    * 400 if the `blob_name` field doesn't exist in `artifact_type` or it is
-      not a blob field.
-
-    * 403 if the artifact has status `deactivated`.
-
-  * Response content-type: specified by the `content_type` field from the
-    blob description.
-
-  * Response body: binary data of the blob.
-
-* **Add location to a blob**
-
-  * **PUT /artifacts/{artifact_type}/{id}/{blob_name}[/{key_name}]**
-
-  * Adds an external location to a blob field instead of uploading data.
-
-  * URL parameters:
-
-    * `artifact_type` identifier of the artifact type; it should be equal to
-      a valid constant defined in one of the enabled oslo_vo classes.
-
-    * `id` identifier of the artifact.
-
-    * `blob_name` name of the blob field.
-
-    * optional: `key_name` name of a key if the user inserts a location into
-      a blob dictionary.
-
-  * HTTP Responses:
-
-    * 200 if everything went fine.
-
-    * 404 if no artifact with the given ID was found.
-
-    * 404 if the type of the found artifact differs from the type specified
-      by the `artifact_type` parameter.
-
-    * 400 if the `blob_name` field doesn't exist in `artifact_type` or it is
-      not a blob field.
-
-    * 409 if the blob is already uploaded and has status `active`.
-
-    * 409 if the blob has status `saving`.
-
-  * Request content-type: `application/vnd+openstack.glare-custom-location+json`.
-
-  * Response content-type: `application/json`.
-
-  * Response body: JSON definition of the artifact.
-
-.. note::
-
-   Json-schema for `application/vnd+openstack.glare-external-location+json` and
-   `application/vnd+openstack.glare-internal-location+json`:
-
-   ..
code-block:: javascript - - { - "type": "object", - "properties": { - "url": {"type": ["string", "null"], "format": "uri", - "max_length": 255} - }, - "required": ["url"] - } - -A detailed example -^^^^^^^^^^^^^^^^^^ - -For this example, we have an artifact type 'example_type' with fields: - -* id: StringField - -* name: StringField - -* visibility: StringField - -* status: StringField - -* blob_file: BlobField - -* metadata: DictOfStringsField - -* version: VersionField - -.. note:: - - For output simplicity this artifact type doesn't contain all required - fields from Base artifact type. - -1. Create artifact - - Request: - - * Method: POST - - * URL: http://host:port/artifacts/example_type - - * Body: - - .. code-block:: javascript - - { - "name": "new_art", - "version": "1.0" - } - - Response: - - 201 Created - - .. code-block:: javascript - - { - "status": "drafted", - "name": "new_art", - "id": "art_id1", - "version": "1.0.0", - "blob_file": null, - "metadata": {}, - "visibility": "private" - } - -2. Get artifact - - Request: - - * Method: GET - - * URL: http://host:port/artifacts/example_type/art_id1 - - Response: - - 200 OK - - .. code-block:: javascript - - { - "status": "drafted", - "name": "new_art", - "id": "art_id1", - "version": "1.0.0", - "blob_file": null, - "metadata": {}, - "visibility": "private" - } - -3. List artifacts - - Request: - - * Method: GET - - * URL: http://host:port/artifacts/example_type - - Response: - - 200 OK - - .. code-block:: javascript - - { - "example_type": [{ - "status": "drafted", - "name": "new_art", - "id": "art_id1", - "version": "1.0.0", - "blob_file": null, - "metadata": {}, - "visibility": "private" - }, { - "status": "drafted", - "name": "old_art", - "id": "art_id2", - "version": "0.0.0", - "blob_file": null, - "metadata": {}, - "visibility": "private" - }, { - "status": "drafted", - "name": "old_art", - "id": "art_id3", - "version": "1.0.0", - "blob_file": null, - "metadata": {}, - "visibility": "private" - }], - "first": "/artifacts/example_type", - "schema": "/schemas/example_type" - } - - Request: - - * Method: GET - - * URL: http://host:port/artifacts/example_type?name=eq:old_art - - Response: - - 200 OK - - .. code-block:: javascript - - { - "example_type": [{ - "status": "drafted", - "name": "old_art", - "id": "art_id2", - "version": "0.0.0", - "blob_file": null, - "metadata": {}, - "visibility": "private" - }, { - "status": "drafted", - "name": "old_art", - "id": "art_id3", - "version": "1.0.0", - "blob_file": null, - "metadata": {}, - "visibility": "private" - }], - "first": "/artifacts/example_type?name=eq%3Aold_art", - "schema": "/schemas/example_type" - } - -4. Update artifact - - Request: - - * Method: PATCH - - * URL: http://host:port/artifacts/example_type/art_id1 - - * Body: - - .. code-block:: javascript - - [{ - "op": "replace", - "path": "/name", - "value": "another_artifact" - }, { - "op": "add", - "path": "/metadata/item", - "value": "qwerty" - }] - - Response: - - 200 OK - - .. code-block:: javascript - - { - "status": "drafted", - "name": "another_artifact", - "id": "art_id1", - "version": "1.0.0", - "blob_file": null, - "metadata": { - "item": "qwerty" - }, - "visibility": "private" - } - -5. Upload blob - - Request: - - * Method: PUT - - * URL: http://host:port/artifacts/example_type/art_id1/blob_file - - * Body: ``some binary data`` - - Response: - - 200 OK - - .. 
code-block:: javascript - - { - "status": "drafted", - "name": "another_artifact", - "id": "art_id1", - "version": "1.0.0", - "metadata": { - "item": "qwerty" - }, - "blob_file": { - "status": "active", - "checksum": "8452e47f27b9618152a2b172357a547d", - "external": false, - "size": 594, - "content_type": "application/octet-stream", - "md5": "35d83e8eedfbdb87ff97d1f2761f8ebf", - "sha1": "942854360eeec1335537702399c5aed940401602", - "sha256": "d8a7834fc6652f316322d80196f6dcf294417030e37c15412e4deb7a67a367dd", - "url": "/artifacts//example_type/art_id1/blob_file" - }, - "visibility": "private" - } - -6. Download blob - - Request: - - * Method: GET - - * URL: http://host:port/artifacts/example_type/art_id1/blob_file - - Response: - - 200 OK - - Body: ``blob binary data`` - - -7. Activate artifact - - Request: - - * Method: PATCH - - * URL: http://host:port/artifacts/example_type/art_id1 - - * Body: - - .. code-block:: javascript - - [{ - "op": "replace", - "path": "/status", - "value": "active" - }] - - Response: - - 200 OK - - .. code-block:: javascript - - { - "status": "active", - "name": "another_artifact", - "id": "art_id1", - "version": "1.0.0", - "metadata": { - "item": "qwerty" - }, - "blob_file": { - "status": "active", - "checksum": "8452e47f27b9618152a2b172357a547d", - "external": false, - "size": 594, - "content_type": "application/octet-stream", - "md5": "35d83e8eedfbdb87ff97d1f2761f8ebf", - "sha1": "942854360eeec1335537702399c5aed940401602", - "sha256": "d8a7834fc6652f316322d80196f6dcf294417030e37c15412e4deb7a67a367dd", - "url": "/artifacts//example_type/art_id1/blob_file" - }, - "visibility": "private" - } - -8. Deactivate artifact - - Request: - - * Method: PATCH - - * URL: http://host:port/artifacts/example_type/art_id1 - - * Body: - - .. code-block:: javascript - - [{ - "op": "replace", - "path": "/status", - "value": "deactivated" - }] - - Response: - - 200 OK - - .. code-block:: javascript - - { - "status": "deactivated", - "name": "another_artifact", - "id": "art_id1", - "version": "1.0.0", - "metadata": { - "item": "qwerty" - }, - "blob_file": { - "status": "active", - "checksum": "8452e47f27b9618152a2b172357a547d", - "external": false, - "size": 594, - "content_type": "application/octet-stream", - "md5": "35d83e8eedfbdb87ff97d1f2761f8ebf", - "sha1": "942854360eeec1335537702399c5aed940401602", - "sha256": "d8a7834fc6652f316322d80196f6dcf294417030e37c15412e4deb7a67a367dd", - "url": "/artifacts//example_type/art_id1/blob_file" - }, - "visibility": "private" - } - -9. Reactivate artifact - - Request: - - * Method: PATCH - - * URL: http://host:port/artifacts/example_type/art_id1 - - * Body: - - .. code-block:: javascript - - [{ - "op": "replace", - "path": "/status", - "value": "active" - }] - - Response: - - 200 OK - - .. code-block:: javascript - - { - "status": "active", - "name": "another_artifact", - "id": "art_id1", - "version": "1.0.0", - "metadata": { - "item": "qwerty" - }, - "blob_file": { - "status": "active", - "checksum": "8452e47f27b9618152a2b172357a547d", - "external": false, - "size": 594, - "content_type": "application/octet-stream", - "md5": "35d83e8eedfbdb87ff97d1f2761f8ebf", - "sha1": "942854360eeec1335537702399c5aed940401602", - "sha256": "d8a7834fc6652f316322d80196f6dcf294417030e37c15412e4deb7a67a367dd", - "url": "/artifacts//example_type/art_id1/blob_file" - }, - "visibility": "private" - } - -10. Publish artifact - - Request: - - * Method: PATCH - - * URL: http://host:port/artifacts/example_type/art_id1 - - * Body: - - .. 
code-block:: javascript
-
-        [{
-            "op": "replace",
-            "path": "/visibility",
-            "value": "public"
-        }]
-
-   Response:
-
-   200 OK
-
-   .. code-block:: javascript
-
-        {
-            "status": "active",
-            "name": "another_artifact",
-            "id": "art_id1",
-            "version": "1.0.0",
-            "metadata": {
-                "item": "qwerty"
-            },
-            "blob_file": {
-                "status": "active",
-                "checksum": "8452e47f27b9618152a2b172357a547d",
-                "external": false,
-                "size": 594,
-                "content_type": "application/octet-stream",
-                "md5": "35d83e8eedfbdb87ff97d1f2761f8ebf",
-                "sha1": "942854360eeec1335537702399c5aed940401602",
-                "sha256": "d8a7834fc6652f316322d80196f6dcf294417030e37c15412e4deb7a67a367dd",
-                "url": "/artifacts//example_type/art_id1/blob_file"
-            },
-            "visibility": "public"
-        }
-
-11. Delete artifact
-
-   Request:
-
-   * Method: DELETE
-
-   * URL: http://host:port/artifacts/example_type/art_id1
-
-   Response:
-
-   204 No Content
-
-References
-==========
-#. `Filtering and sorting API-WG guideline `_
-
-#. `Errors API-WG guideline `_
-
-#. `json-patch description `_
-
-#. `json-schema description `_
diff --git a/doc/source/guides/configuration_guide.rst b/doc/source/guides/configuration_guide.rst
deleted file mode 100644
index ddaacdc..0000000
--- a/doc/source/guides/configuration_guide.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Glare Configuration Guide
-=========================
-
-TBD
diff --git a/doc/source/guides/dashboard_guide.rst b/doc/source/guides/dashboard_guide.rst
deleted file mode 100644
index 42527c0..0000000
--- a/doc/source/guides/dashboard_guide.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Glare Dashboard Installation Guide
-==================================
-
-TBD
diff --git a/doc/source/guides/glareclient_guide.rst b/doc/source/guides/glareclient_guide.rst
deleted file mode 100644
index 2eda50d..0000000
--- a/doc/source/guides/glareclient_guide.rst
+++ /dev/null
@@ -1,85 +0,0 @@
-Glare Client Installation Guide
-===============================
-
-Installing ``python-glareclient`` generally requires ``pip``. Make sure
-``pip`` is installed, then type::
-
-    $ pip install python-glareclient
-
-Or, to install ``python-glareclient`` from the master branch, type::
-
-    $ pip install git+https://github.com/openstack/python-glareclient.git
-
-After ``python-glareclient`` is installed, the ``glare`` command is
-available in your environment.
-
-The Glare client also provides an ``openstack artifact`` plugin for the
-OpenStack client. If the Glare client is to be used with an OpenStack cloud,
-``python-openstackclient`` has to be installed as well::
-
-    $ pip install python-openstackclient
-
-
-Configure authentication against Keystone
------------------------------------------
-
-If Keystone is used for authentication in Glare, then the interaction has to
-go through the openstackclient plugin ``openstack artifact``, and the
-environment should contain the auth variables::
-
-    $ export OS_AUTH_URL=http://<keystone_host>:5000/v3
-    $ export OS_TENANT_NAME=tenant
-    $ export OS_USERNAME=admin
-    $ export OS_PASSWORD=secret
-    $ export OS_GLARE_URL=http://<glare_host>:9494 (optional, by default URL=http://localhost:9494/)
-
-When authenticating against Keystone over https::
-
-    $ export OS_CACERT=<path_to_ca_cert>
-
-.. note:: The client can use both Keystone auth versions - v2.0 and v3 -
-          but the server supports only v3.
-
-You can see the list of available commands by typing::
-
-    $ openstack artifact --help
-
-To make sure the Glare client works, type::
-
-    $ openstack artifact type-list
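The same information is available from a script by calling the REST API
directly. Here is a minimal sketch, roughly equivalent to
``openstack artifact type-list``; the endpoint and token are placeholders, and
passing the Keystone token via the ``X-Auth-Token`` header is an assumption
based on the usual keystonemiddleware convention.

.. code-block:: python

    import requests

    resp = requests.get("http://localhost:9494/schemas",  # assumed endpoint
                        headers={"X-Auth-Token": "secret_token"})
    resp.raise_for_status()

    # Per the API spec, the response maps each enabled artifact type name
    # to its JSON-schema; the keys alone give the type list.
    for type_name in sorted(resp.json()):
        print(type_name)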
-
-Configure authentication against Keycloak
------------------------------------------
-
-Glare also supports authentication against a Keycloak server via the OpenID
-Connect protocol. In this case the ``glare`` command must be used.
-In order to use it on the client side, the environment should look as
-follows::
-
-    $ export KEYCLOAK_AUTH_URL=https://<keycloak_host>:<port>/auth
-    $ export KEYCLOAK_REALM_NAME=my_keycloak_realm
-    $ export KEYCLOAK_USERNAME=admin
-    $ export KEYCLOAK_PASSWORD=secret
-    $ export OPENID_CLIENT_ID=my_keycloak_client
-    $ export OS_GLARE_URL=http://<glare_host>:9494 (optional, by default URL=http://localhost:9494)
-
-.. note:: If KEYCLOAK_AUTH_URL is set, authentication against Keycloak
-          will be used.
-
-You can see the list of available commands by typing::
-
-    $ glare --help
-
-To make sure the Glare client works, type::
-
-    $ glare type-list
-
-Send tokens directly without authentication
--------------------------------------------
-
-Glare can also accept tokens sent directly. To use this on the client side,
-the environment should look as follows::
-
-    $ export OS_GLARE_URL=http://<glare_host>:9494 (optional, by default URL=http://localhost:9494)
-    $ export AUTH_TOKEN=secret_token
-
-.. note:: It is often more convenient to pass the token as a command
-          parameter via ``--auth-token``, for example
-          ``glare --auth-token secret_token type-list``.
diff --git a/doc/source/guides/hooks_guide.rst b/doc/source/guides/hooks_guide.rst
deleted file mode 100644
index ecf068a..0000000
--- a/doc/source/guides/hooks_guide.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Custom Actions Hooks Guide
-==========================
-
-TBD
diff --git a/doc/source/guides/installation_guide.rst b/doc/source/guides/installation_guide.rst
deleted file mode 100644
index 20a8e44..0000000
--- a/doc/source/guides/installation_guide.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Glare Installation Guide
-========================
-
-TBD
diff --git a/doc/source/guides/upgrade_guide.rst b/doc/source/guides/upgrade_guide.rst
deleted file mode 100644
index 235f849..0000000
--- a/doc/source/guides/upgrade_guide.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Glare Upgrade Guide
-===================
-
-Database Upgrade
-----------------
-
-TBD
diff --git a/doc/source/images/glare-architecture.png b/doc/source/images/glare-architecture.png
deleted file mode 100644
index 64b491f6ab16e018453dc9dfb3b3d72d9af9ba53..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
[41233 bytes of base85-encoded binary image data omitted]
zzU7|$#>D%!%l%PJzf6~b9|dmstg~TU?#QAC?H1!i=nWnT?ijA5H$QVKU54Y$f&xj1 z%xfYGGZG!(Lfxf=65ed)+ z&w;=3N2oagImY&ZrtjS(E<3upowIjUyu-MZS}_In=-oN}{qG4f_%dJ|{p{SjkfD^! z?E^~&xZQ$oXFt|u+;zxN(J%K5>uP2UkBYkR^+hnE6sHtJiGQBy$!saQ_~h2DTQ6U| zvJiytFJ5Q?N5VAJF~NnGy$HxEOMgOL()nQ@ps)I%U0JVim#+1k4N z{;sqkz8T@bWHah0L0)fcoVF7R93`-YK)9Rv^v0ZATx;+#;~>!k`nZh&Pz&8>{c4X1 zPY@3uyQIs3dUAe0h{)Utw5k#R#SA=N03;9<6~bdC91j%LW;uK=8COkWP<%IUPOgFO zWk(tRv2J8xf=KQO&-oRqA4l(3ODA#tD!JPT5~ktdVW?d28Z|as{9J>t<#=F1ATE$( z+Ifyfm<4&SJ((*6-AH6VlGADxnM%pXG&ME`dbgkoPVyaF-@Vg}As7dJ5zT!5hi#NEj;LI1HLS5!G(+&J_h|-KrO(9vt7Z*RBA1Gvm3Adbq4*xrWxqe*%OC8`U zL1IaM>Qo}Q!$9bxlyLiQr}~Z2k{=vyc=PH1OjL#AHy|8KWy=nQB_0~8U*R+Z`Eme} z{Bxy?Cu=9=0v&Gh1TX3WGN%sI(jK3)V=5Fpe6qG9-rfzy18iBMyJ z9a|JdEqlWb;0uVaLB;aQ&(F-*xY;U3lL{sgFG_I|K6&C8ae24$>~-yDzl~2Kb_(`+ z(5wN<;_&X^1`42$1th01f>NCr{Z?dO#6I!hJBB0OYXFIeIG`BiboAbdAZ)3~$T)_8 z0mNmtgE5psB*%N7ArXa?9O6Yq=NkjI*-?-OO#LB6KOM{5Ayqp+> zGl^p4QPjJh-;8N84l+Aewxmp_SKrKdmX?;5o}R0(vi+`fkQL$h`Ku5{rAST?m*dd7 z+d6Yp?=DVQj?#DUxk(P`W=*t&^E{N>Y|osPa;Z??p|!=Q2y9>z!az~uaHzN1!&Qh& zzqbW7fcFns|}fe}tf{ME?3TeU5%EOoC)F4@x2~%ZrjrJ8cQ%Hesu) zPIJP|YOj6`Sy~(5XL)r?A9$Fh$8kzdM-G%+dR*N0dvLhP=BHV!k7_h??Df&zx*+Vz z0QZ-SeR+k~-{)=?U9}9|*AXesZs*#ZS8z69vWj0hu`sWPH7+!DYUiPljh= z%d}b8gpKPKU+-i8;+MaPAgV7f`b3JAv`rt@7YntKKJ!U z?c>R#)dqWor6mTXu@yBi=&bdzdUD%oeuF>Xn2e||>~#$~-Ylt}E8exWlqlEoxUN(! zzEMau>kno|@^$`YQ@Gq9h2LbSB8!F&hXy$S3COW>kenQ%;UTdTk3W4FI> zouPIQcna0JDGOS;dU7r0UU%lGOvaV(mr&@bJ+A$yAwo zNAOo|1Zt}U=C>Cv*HriA7#HgDSgkE(wWO9Q)@WcK_Uo+XZDnm9=`jta&Z}V#68eLh zI0*c0?LprM#IOM8mRh6$CFD326%#YZi;~u5OCF?1C0Rz7n?w-Y&t}AX=dJzB5ED|{ zc2Bmbd7vqmZmE&>EPytv;+hMUWwt9y0JA(3qNc|nJzQ+j^Rw$c_Wl{*GZpBRe~-sa zu2r5bMH3CHn?Pn2o0zDprZyRI?;1CEdh=6`Bx5dA<0(qMfYAXAz7x+sPhNe+{9v ze;-T#nuN#td<%g9gpi=%V35k)Q&o*p?gF(SH5KMg5?82V_Za=xEY?s}s>Lljx8EH= z-6L9ll!_C!aF`O$Xw>q2ga;T4_&;)sTe6WG!39$uGta^KU z>*^++@H@-V&k@e|%`_9$sy?D#;@hR+v`ylHW3>vJvD!S3+ex{`W<$0);ml?6vf|~3 zYx%gOPhD8Kz{4GH4a+Yn?ouMkN!+AV+mbXIjfU6xygg(GQu80O%^0?;XRp^K8^>+G zwG0ft8(CAWfF<-D$x)u~QgHSqhGWC;Yx{XusmV0E0p9u-CZYkl^|xR#Bab)`7Ql?QGXL#w8;857~!jk^Nhb=yp=hR7$)y2URECc5#i)j>Mh z7<`E(Ib!Qm>bj8UAG@#4rX2}dy1EPfy%Z(s?C0bH>!U94-@uaHi;ZEtu3Ad*=>AiR zH;;`xU5w>Ic`j@-o-YvAf%A8@`9s;WS?rGF)2!L>5;u<23(Lj4>^~oQD+X3O+g}`t zJj{b)_MtKz`g)Sg=dX_x;|vj$hk*eB;XVjB05A&>kb(IDjc(*HoYy%ykscc$8(#bB zM()eDGh|Kdn?$ZgP71N8F1((+potMeP3HlkAYe8|0PsYXxBrQH3O zP74(#*@~AG?(`kWsNvpryT)lafvx=Bw*7HFt=4t}EJ6=vWLy9E9Gru{o%oU~tk|em z(4;8(yR&2z68UBehokkz7Y_Gf`3*nRl8ebIgkybu+Ehu%npduA>Q^3o8lqVY$}0Ks zQf^m;L{*j{R@6v)nd5vUC);R@=rVfq720i#A%)5`_pW6u15M`ZqJ3p#SAOO~N}YzB z^J8sAQVZv=h^;qcd|EI%wEng&1)j?lf&To4=fk;jM38K$HXIAM#kN1%Eq6T|sI-F5 zpAE8wxzC4VUD)E4EOFlTk>%);wToVZYd9KV!Omx?5)Q3PHe1?-)TXv$*!CKyw3PYQo>*y<=3rG>H?)7>Xf3$$B={$4jeaa2vAw!C|{i? 
z_B5wKe-02TR)GE+0N(dKLH7XC41jx0yDAd;)N+nvD)LWQ(yfVAo9NFAenzLgfJPR6 zzgJ((EO0X`z2OBmuk^!BxO(ruS3k?NR5F+eF*0*s&0Scjckdp0b%scOj%Yb2tKh=$ z+jyFzaaAR0Hl{mSI&9xuM54F?)p`JMS1R@KVw zhITK<nb2b4-18*M-VrSG|sUj}|TuCG5b4lVv&)^>AnE6~%JA#(3dYM>sY zP+B4*2#U6D=FN^4?mpp|JNUjZu9#t2%bT{2X2x>leS*mlZ&!ke<7gxn81BGZ1kq}c za?5chX@2OG1mWwZl>K!U_FQ;*RrlqF2|K_2FWg&3lPdXEwX^XPJ?kU8+Mld&#Qs1f z?_Ga98C@j9Xh}T!yoa{eqI-ghWn~hQJ1I3H$Dp`j1c4Dj4Ib3aQx^BW_Q1y+@FXWd4#N zI*gC_cdtTd-%2i(V^cm!Ig$^}#56$@tFP|^mBIKDC203kgU}Dx=ZQvMQ@)+zD{GZZ zAM4!kgYGJki87)>f9%*|JP92yuxJ|tvZQNJ7?j1MzddJntbfhXpS#I3J<5dASm%iC z_rtdD>WS0ytFFCny@KP`@1=TBE*BsD?pt7W3*S_Q667>Ahr-O{CbBa!q$Sze*ksZk z_C7o+rw4Tk2ja}i4GBld&oFr4n}txozr7>%hK!M#nJGJ4zZuhxdZdKOwvn@vqEw40 z2&Hql-xOVrzVxGGg>fLsy}mab$lPcM4ySn3hv0OM8VAPAno#41uX({B=kE^U~% zJ>C0eB6q%trplo?&EhO}rLPL?E~+A%+wmRdP7Fi0%~9Q!h-D2u5i1QB%Vh8qi$aJ6 zZ<~uI)rmN_6P69`&Y?L9ME1Qm6%BVUwB98~sGa&bP+{jH3=4Ww{?y_NoXN{#rMO!y zA94XEeg6D#A-6qG=YaTuKp9f%#3-)#(iFXI9a7x|_j3N(4pGy&Rhvmvqx(FepQ1^d zYR@4?&Mw9r?+qJGCyk)f>~Pa;PrKYu3|FkBoB{>y+lbUf+2T#j!^`NqADS^=`%t#k zYdb5QnFkH^l$7kEBO{IM>r$3_|Gih-WJEU1+S<+D#)b66Se2zk)zpF%LJ{d6H)?-p zS_1kZU3ZC&kLPeO_DUxE!4obhW_}yMV;>y} zs?>8yw}4bu_*L#QIzSir-502+I^*~`goMfgnRgww_p91lu=U|snS8u438Uuw?@qY6 zbhEyUmY1a5VX_shUEh?_cN95;k#ONP`|G^yzu60Ofo}%cdFeOzIS5umzFrhV-9dQc zCx%zAogQuCK$1g1L{zl2kYnGG=pfqJ4+Wuyd~X2rzz{`YJ_m$}a3VV#4$=h&H{21I z$u(wP(Ba%-B?*J3g_}%Zxe~^~OPmMKhsPW4^kxNmk8+BVzBRJLR+H+ao?9fd|$U>^raa?4@nyo#6g2I zQI#a>=3rrQJ^#mvNwS0NdXk&i?Xe7~yHn4(Nj@b5XAmI9UUzZs6x&#x?X!ZWQ-&H) zmOE3qH8i?6B@#MkKYF7Hg!LTzxMK2aK@uShfLVZq5_lkbdQ7OAJC7gt9H9_}6T%CI zN+^4hT!Ec3Hee1+W3HmSW6gq&jPeo(@zyBd18x2?G&P+9BQggE2ME#LOVnzaa-;C_ z==%k9UZ7sHS;N8(PwSD5!>O2t(2MjNYGO8Pg!W;u@AheK{h?g^TF4?G-~!8QDsl3Y z-TP}W^++Eiah3hPy=XhZSD3u(N1Yxq;5uxdH zFaTtZ+S)W^AxYS=m!c?!xO|2$`Cf};p{t0yD)uy!bqlaU!N5BYc*R4a@4?Tn8hU7+ zPFRX!JlY(2+u7;4V5houHiln$u179tCRc3F5xYm9Iq)kozT^lk0thnQFp+B#^~oj$ z5mDGC-EG9er&E2qhpd2Ija@6x3xyTVdKhE18H%8OBsh~7Fi%fk;+u#yOXBspGYjKf zS6A0=>v_yL=q*>wt@=X|W^;Z<+Nx=Ix9kNaID*CW4zPiiTQZ{ZdJ=2?>%Wc+4v`8r z_j?qWJ2F0A+5g3izphh8MBAmAp8#85H< zR)V{l;BFw(5F-qH-x+P`a^i4okZTL>#CA=9{;C4k{sqtRcJIfk*+%L9*?JNY4 zg8?`Rv~HMLfU|GH%7hVu8u)~h-iIfLVw8{+%Zs>7e2X1<)f$;Moy^(e?$lEwVd;+A zq3x?pm+Qk{%!pme>pk~f+eGe`oR8SPec#$7v=2!Fg=20VR3|-wiw*wAW$D21D z!5RrVZ2@(Yj(_yoLHHAa1&G2%*IG_9<_!PmEF~{@i`XArMn#HC>i10Pi`M+8N)jx@ z*&&eziGXk;WKIZXBT?R+099n-;FG7qWsJ{Y!ql*$Mj6#qOkNtt%6(0_HuUUQB! 
zhaL%WSrj%IS`imcrMK%cqY;(@!opi1D%>sD-{KMSAAB=EQW>+En7~u1;eso2^*aio zk6C8-q_$i-;{oy=#?@p2T^IfRZv$&z=IF;)W`Y!{>$L%*a5tC(Z+nOcWo3{19YJgo z0RIO4RMPY3&p|zE&k0@yR}W8s=y$*o_(6cwf@;8ylSD@@YzjV%NMi{JQ0F;GFI=3L z7kuQ8A9}8V zuP;1ynobkks4tWJl7RgR@+X)>zr2XdTB5#;1WZvi&)9{KCDbx6+S4d!(Lao zf9SA2{-akI@~UtxfT;sW3s(QcO%M`4U8DwK$0rulkET%lYIlAQhL0nE9ws3k*|*ii zHXUKtk(HDCVbMdv@KFP<49>yXvxTF=aA~g6;mRk|#| zJ~P=M^^3tF$9>8#&AF|k!_@BuO6$XuPr9xnQW-j>1f+a&*bVOc~&zrUs7Qt0aT zJ%9~D*4HpmF1F#;ASw~y;0O|U z!%^a-sri=Ss~{BofG`V`x;8r=FWw*nNv}u&n_=@d-hSye%(O8iyVtH>Z4@mGwLDpwtAnXCG^ z$sIQMpTuQ*^N+C2dwbEZTYt1$pVzw1##6l-(Vm3;^5s#T9}o@T(xo`xS7lrEuw+w0j0+q&`;q@x*Syy?MVdUX1LATeV&HKa`i?_kY2UB z7<7v7=S6{pUSJBv#5nVNssBf`i;Bbrkn3Tn1C>e&egJF-I~|VnTLd;Z_+-36@7NNu z<6IIz1K(F3ifo!u=IyXw)qVclwjC*nuW(%w2Oi>yfmo1Hfz{v+GJC)os9TN)vV;=w zbrvjwurznb1kAIRYke_^B`FiF38A$uy{Y zNnFTkfm0uRcmmPZMBHS}_M#C9?;|1S*{Gkr3j2Q7x&U!(V@Y2XMY%UuF|nJ9Gnt3q z)l1V+&4Z2H{oxiA>*jE*qVUXo`ueiKQ%oXPdx-|dI-d+BWeIo;JOUWL3R6IkFZ?)C z=}OCy`Ml2_jyn?MSneeB$LM>z4Oy37a-F}Q!px_urB&{;@9o%*8w2aQ!AjSHhQrJT zlxKBuzPBUGdPn#7T!Z+VWtKUXvnhEV8$fsDHd)ELs)_pLPj_oH?DL>%8?*^UQ_~Yr ziVnk)L}9@Tu=;l+?Yxq?)ddHjr7DU3e))ITBxQKD@}}}Es18B4w+zLoV`n~Hh21m% zLJ?$Gw7(Jbj&v3Mj4g$foSFz-QUd}5ao$z*zP!s>GB^($%w}dtASogHVgk@Q&GaVk zIbSL{WJ88Ylck+P>glV<;15BDC}`*ArGJ&ED9~@yBX#Y=Lfk&2j43zCQUSvU#av-w z_{cxvq;7bp1gnF-8ZaLGNC#X0prG9Rd{f5f1$Rkv?<0pG5|WDa4h#&1a|J0aghfY} zz$#m3`@{4te^aq~+d%hyl^=sFzeF(#XqoGT!8Z-!@r3G&d!s(;NgpJFcAhJTjWCciM z5y!r_>JJ_~s3h+j9!>}g`v8=&b%sB;Cy3ql`Bcr6pSa8i+Svmu_L3oesQz^Tze(nK1xeLuHz*_@UWBfv1dRvs@=-@F zQ<_K^slyA=VW!P4C-J1qF9i3tTM4$qZ~n1X*3seYv#$&o>*?u1(k>S&Tc^G^)lyxU)}8 zUWlTEYXI%!dxIcil&9F?!~cB1Ju}@@PvUC5b%3zk%4()GVfom440*(Rc+V{x$-tvH zsTYB0WdY;|$i#Z%hQD@>A(CrRI&Fsu6?F<;l_7+6EpOm+4>#}PuhGItG)T5lo)<$M zT<^%=UpxUTF6h`rVk-KPKp59D+WAR^`<=@5^5m9&P*QdzODq-PoPiLK62V1r;hl?B zaYn{f;1JWOuAEz|<1M}L!9}hCw&VxIUv^Rkj?a7;l+U|CR68j|bT_~Yuu2ii^gaF@ zvW6OAVd>C*Ylxb*oQViwXTo&hqcoHPYIhfG0dy!^=@9YKLW+3&7Tsw;#Q?j`r*ioD zD9T+Hq|(6E*1xCRM;x8)ksI20?P@Hy{$bvXI0WfgGp6WN4ngi7sz=Uka5Y&-`SB&z z*=Exjyq6duSMf0|iBUxI)q6Ojc^8c6c`0-LmT2z*5XkAfbf ze+6ffSxn6+d*rIZlY2U5K62`7?U_j!ECxeKEN%3zf$l=MN%V`^<%lopu2QA>gJb3% zYr#?qCQ(`QPS`(!HibTOD)!(@aW)THqc_W_bbj?Q6~Oh`?5=>V^OA5+8e_W)2f0ra z3=k<8CS1@jH@x|6Z&O$8s$#XRJ4fGS@OAB}r`C%`8Mze^4LKolwC^Y7A5-}Y#B`Zh z4G@s}EO%Jx3GD$}$MMc@s(s0vLeo@(#OjP#D=KrX^Ce9_`SgPm?ufF$L0P#AU<(6L z=37ESn}a1*;@*OWCMM~HpH%FD=syD}GZQF}nu zJz(%xf=V&jxxSsA%AsL)FIq`TlXEwV^iY+jW+BoAnRR>e{e+m9rZYt+<{0M!Kbscv z8qW<`_Vux|TDR}e8WT*9UqP(53(zfBG*Z(E3I*xEhlmuD9kGQLk@sXS;~>de@1Ca zd(pooe{%o@-27;aqKyOUEd^(-rD1Nwm}7eNV;ZCEB%9_Vv2J)?q_s zoUfj#ahzvC_IDijGvpw-OpIu?S~%P@*Pvv1WGnlQhQ(v4{mGn;itfVlV7IpZnUS2R zA+6hHaXEt%u9MGy3{DR&-owiIQ(m-)o0v8>fX4v(cY$W1;g^)Vlxp`O=An4qI3&OO z0ZclB-cTGr4*4ku4^OSrOvJPa;TqQzBfk%fColC#Ve0ow#E9Li9!eC0H_f1j+bY`G zo4Tv&U?fF$rzP?+G2buu3^x@=Riga@g_%(Tr4RArdtLOwM8wa>qA5z3gS1=fVAg+hL##&vESGzy^@j&ZI}Sk=^q&I z;4mdr6>EB4kr__acFM_m`o~6+Scqd|VPaM% zgXD%E2G5Fg?d1o~;hDA3mU5QNBdh65<23pAX+R{g;0D8Gymrkpy5ak0qGJrO1N`$ViLOzJ+DO4iL7jsFHmLT0|Tj6 z&@Bu}{8pIaaAaWA$zUs#{ouALVz)(b$8;7}5!9Caaba9RajjD~Qei)Fv_BQt-B_Rd zTT(3Ki=nk5e^x=wmL+(6AnK~?8UR!M9ojP*m`#V2H4`TyPU~<>5dfDw#$hqJ6-%d` z?JkuY-WXKeK0%P5J0lhQUjMQC)fZK_3W2*R&Vd54ilv&t`9sVo#YE=|$KCPfkHWcHj~zr;u(4eH2M654$)-=oN`SZl zKo3}{0g*m7Hg@?r?k-&IIrjm|auA{A@j_JpLt6P`h9`f0h`$N6?-U>OIevGF?qCOy9=CPpUA>_;PkW+=<1o~@<1?m0d zgMY0@^TM;7hv02C;8;mbjkuWD$Z#>Fb1t z1+y0GzHhcCvT<^@8L_s0l>+&u0HX?kUyAbb^WZ5BIVvd53=GBrqrFahmlQWyfFu3n z`&bM$##>f5Iteu5ouS%Y6f~zve@)M4?4PS1pOC=GQ8kxe2_AhjIgI}5dSr+e2f_{HrpWCv&)nC$5LIXG%^(TW`b>=%iMB^Vme29vQ 
zf(!)s2c)u22VVdUQrj>?XLFwd)xF&!6wvU*G=m8s;n5$0Y&G3PNQFSGWSN4k4-!AXc^>N1%E{n++uB z=TG|(s|pZdx{i8j$Uorug-n#I`Md9+&nNg(KhocadZUzi{eq}y&b5pZNZ^`Cm8%dc zEmR#1EmHMy?Wu;$#uwtBK+&@%8p&qv;CI47?#FuN$|}^PfXeWN^Bs&U)OjXfn8Vxh znh2L^Q_6*94^`N$!X^gTw$^q#Bnpc`QZE1Y#yi)gZ$P+U);mO}PvOJb*}p#y1Yt-~ zjS00`U^#!=-VkZGh$t3`4T01k$?=61I?Y|YI-xM6ewxfT@MNb*?B&d)#n!OMnAk4PlG zbVC+D2`o8v-|c=fM$ac_bCKx)E-4jjnSfL8fEjC>S8GAMuh)d8&8s-t$)z;8Z>-+$+9RGa8 z;ATw{8QJFH|IU){WCH7fkIw<#7O>@|Q@fxzK1qo2NYT#(O*6=udwo>(K(D(0ovIdy z;W9iYe}mI4Ya3zizw+qW2SE(y&qL#hw2hlHHO^3NfcBA{!^hsn#+E~XCxnI7;&<{A z&U#277IEJJq|L#JQ<}EDt_oU&0Csk7&pi~R42F1L1Oc7>2Sme$+CX*VWgLjaS3#_t zDw0He4Ps2Bk^$`#p18YjK@4f;WeQY(Ai{Xo^Ws?a0Myue@j_BwN^K$Seox9NB%S@} zI_~+yrk*6_-Mh=;;(O)ONkt*=Bet9Xr(ihoDm)nlF%QxJRsbA}`iX1kLy!Hxd3R+BroRFNbeSdd5!6sb`FZ9U5Z z8g^cWt(hljm6P@o#CN~c+3v#GqXBQ`FW_^zCl2TajBnb3;3)y{t(Vd%Bw;6$`37J+ z3xbgNqDu&%h=ndJJKW@!FvVeKNfWw*ueyd({eTD^;Op+;=XoIl0FYbc_+igjH~_%c z-tkO0mu(9~T>s)8x%5Dkg7W`PZ?O~9n1Bm#JF5AB40@ClK|Oxzb3!Bvk?&qoQBHn- zKD;nEy~cf!I_&{m=9;2V(gKkbcuCc(9v<@{0vHS1;_+|GcZmn6z~(6)&I}~QM_v@N zgw#`h!;TMqKWkx1KG}N@vCH|YNgE{h$5;M(E#%{^0olKY{tS}0HzL1{UQO)S z^mFi7Q1CLu@4NvBH^_oiulr?NTN{W396U$ZPz|+R5i;Lym@OI&Mn0yG;M&}7&r~F+ zqXJ)}H);O!T5PE8f%Oh_i4#>m*up~PD~Ou`orz#ef8P=9m@^B#+Zm-2bdUL*%p=`* zTjmB2^5k^=DKF*MT8NaNR4 zV3XkS7^8)Zw_Ly9H7LdDExBt2(VDyOx;%rz^Kwqia`6yZ@y*~rcTxp7?h~V%`;}}2 zS};7~D9l zm^v|LPzyRh+|sjviW(fu$<2Lw-sh%-gv8~`pLiIACqi**yB=KZ-K)aZ!T8w$EaR@7 zyt7x!t$6_~zF2{H?$B(P0#_an)1L837VUyhI$f(gG+nEJ515%2w?g^-?J*9SQ)=*4 zFsnX}K!;+txWPf~@>NcDcJkLg0%BrbprogBa&5-QAK9|FIM??;KjzG1zexF@WYrZ$ zn#{&S+w1CH%GNFYi}zSH8wVXSVvPw?G;JnwHMe=Q*MeAs)OcbpqsRpC`5#akeQtQu znuTq~0Ax?7MfL>A(X{-0MR|^=d_DpK0*vKpSs-u4(Hpd)f<`g8+&CI(qk83fL~!H#LC? z@|CQ;ISA|k^*mQKd78LIVCc)dDEMg74V^?a;q2#8HQ>l1{RJX0B#9EOdA1kn94{?Lo=iI;HeKOAsG^v zfJ3Mv9;8^X)7{`oged##*RPPPx{xAq*1zY7!yzb!KtG$OUS48+2GBI;CG?J5KZ1nk zI8tv4BzAC51mvBU%iW&S+}!*p(B`4bd*xxcI{0CmozPMGRh*a_4V*0I@+bv>&_RZ; z@awO71R(7%MSXWc#eNBnJ`D}O`^8f5%b$?_4>mMpbewM-6XoQDHl^gxJ}C7&e+79P zZW1JyP!_;l9K)4BXb2|0u(F>}EOhtwMgdmuzGnIN5vacFpaQs|Z}9{!{Vr`;K{c5z z{XZ_I)2@6M^8lSjUcjyVH8D4=(sZB%!#_W%{}=Z+4mxsZkv=dzL<;}$yh1_>yP1;g@Gz9W|2;eaSzmQp^L1HpL})q43AgcavvI_qvgbt6 z0r5x1wX9)L7^g*0Tx#xL)J!i?)$T}$Ff?*#>N?onU-!f#rsD@7x_2;@i1yX1+28`< zVCRRL&mVaO{2qdn896tfieT#?T3I9DzFg>{F{VW&rNMPD7jFT0M zS@qp`d^aB>q~{@2>pZTcH()BSeA9&Y{^1&IKc#7|Ef$qlU8JJWxcomLW`ABM=!yC+ zUOCaZ`O3w`HU+lQeJjV%XwLovd6T(NkLu{#wf*JD_-NDE&O*JS7k={kQ`s`Te701Y z6;VIpr~kY|uregzmD7Iv{RyHiTC68Zd$LX^i5DyVoSm?R5;(_9-c{y zQi>bx@(1kOIBSbg6IM_JS=8j+)i%`nQ>{_#eHM`KlE+I{ULIDARXL`M?pnD%O*|%Z z6g-xro|bXOe$-$RwvR7qnqZ8fXLU5*NZKy*BI3$T$tYqu&kH_UaMVKE6~;0fiY+?`w;fp`XDy z6Ku~h;vDF?x>*LmIs-w@$ zbF2PP_`Ao;O}QRdW=>V=eoD|-C54dy(OG|+>+e(c-{_^$bNeOVj5=D?QC2R$!eV(G z-Akd-g1)QqN`hEaGv>d_XA)u93T%#nfctF#bO)yb*DNy%@$*iR=oXn5sp$RGTW!fz z`K)gqxvrB2qlr5{){DGmCWMY8KyZz1=L#mNCSBj?yW0Kdy8muEpKOig#G~tJ~JcOgBe?+{#+o^QKh2%z`={hH-3xB0!(O4~t? 
zGAvUg&MNgvS>opV5`8v+fzCy2j=!5_D23qSke-E6ac^axyJ9utBUcvf{aWh<-dF&Z z>RfO=ZMNUg9CyA=#nH!b@xH6{%ihZb#aY3|$QS#`u_+QgI{L2{Af5p3DT&ItgkuD=8 zMyS`pXBwW77{X=#VY)q9MjI}`aIoPc`;E zzqedtHj9$fJL( zhj1By8QFcf*2^Pgr~NJ7Fo;!vbQd^9hYjK17Y!YB)m8v{Dbu)Rh zt@P$T*xrLHjDzzV3&h9!5DCdsAfLlQNCMCNZ#|oYge2iPa=-AeWoQ2$(?_FJDY(4hR5G|A1GGp`js^%+CN_1=6kY z`XeO1W8*Ds$vSxSCGdRz0RZZ>=w?yCr)jFMH#gmx zQSv!=9Ex)ZE-YSX1JNui-`?0_W#+eWj_+U$i+xl1y;|c%4tB3!P2mL>P wKX}T2fByIHe?R`;fB*Zd|8MWZ-jnD54<6nWCjbBd diff --git a/doc/source/images_src/glare-architecture.graphml b/doc/source/images_src/glare-architecture.graphml deleted file mode 100644 index 283cc79..0000000 --- a/doc/source/images_src/glare-architecture.graphml +++ /dev/null @@ -1,876 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - Glare Service - - - - - - - - - - Folder 6 - - - - - - - - - - - - - - - - - - - Middlewares - - - - - - - - - - Folder 1 - - - - - - - - - - - - - - - - Auth Middleware - - - - - - - - - - - - - - - - - Fault Middleware - - - - - - - - - - - - - - - - - - - - - - API - - - - - - - - - - Folder 2 - - - - - - - - - - - - - - - - Router - - - - - - - - - - - - - - - - - Deserializer - - - - - - - - - - - - - - - - - Controller - - - - - - - - - - - - - - - - - Serializer - - - - - - - - - - - - - - - - - - - Engine - - - - - - - - - - - - - - - - - - - - Utils - - - - - - - - - - Folder 3 - - - - - - - - - - - - - - - - PolicyEnforcer - - - - - - - - - - - - - - - - - Notifier - - - - - - - - - - - - - - - - - Access Control - - - - - - - - - - - - - - - - - Locking - - - - - - - - - - - - - - - - - - - - - - Store - - - - - - - - - - Folder 4 - - - - - - - - - - - - - - - - Store Manager - - - - - - - - - - - - - - - - - glance_store - - - - - - - - - - - - - - - - - - - - - - Objects - - - - - - - - - - Folder 5 - - - - - - - - - - - - - - - - Fields - - - - - - - - - - - - - - - - - Validators - - - - - - - - - - - - - - - - - Base Artifact - - - - - - - - - - - - - - - - - Images - - - - - - - - - - - - - - - - - Heat Templates - - - - - - - - - - - - - - - - - - - DB api - - - - - - - - - - - - - - - - - oslo.vo Base - - - - - - - - - - - - - - - - - - - - - - - Cloud storages - - - - - - - - - - Folder 7 - - - - - - - - - - - - - - - - Swift - - - - - - - - - - - - - - - - - Ceph - - - - - - - - - - - - - - - - - - - - Database - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/doc/source/index.rst b/doc/source/index.rst deleted file mode 100644 index e6fbd75..0000000 --- a/doc/source/index.rst +++ /dev/null @@ -1,58 +0,0 @@ -Welcome to Glare's documentation! -================================= - -Glare is the OpenStack artifact service. This project aims to provide -a mechanism to define tasks and workflows without writing code, manage -and execute them in the cloud environment. - -Overview --------- - -.. toctree:: - :maxdepth: 1 - - overview - quickstart - architecture - Roadmap - main_features - -User guide ----------- - -**Installation** - -.. 
-.. toctree::
-   :maxdepth: 1
-
-   guides/installation_guide
-   guides/configuration_guide
-   guides/dashboard_guide
-   guides/upgrade_guide
-   guides/glareclient_guide
-   guides/hooks_guide
-
-**API**
-
-.. toctree::
-   :maxdepth: 2
-
-   developer/webapi/index
-
-
-Developer guide
----------------
-
-.. toctree::
-   :maxdepth: 2
-
-   developer/index
-
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
-
diff --git a/doc/source/main_features.rst b/doc/source/main_features.rst
deleted file mode 100644
index 6bde6b5..0000000
--- a/doc/source/main_features.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Glare Features
-==============
-
-TBD
\ No newline at end of file
diff --git a/doc/source/overview.rst b/doc/source/overview.rst
deleted file mode 100644
index a160026..0000000
--- a/doc/source/overview.rst
+++ /dev/null
@@ -1,17 +0,0 @@
-Glare Overview
-==============
-
-What is Glare?
---------------
-
-TBD
-
-Main use cases
---------------
-
-TBD
-
-Rationale
----------
-
-TBD
\ No newline at end of file
diff --git a/doc/source/quickstart.rst b/doc/source/quickstart.rst
deleted file mode 100644
index 457a888..0000000
--- a/doc/source/quickstart.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Quick Start
-===========
-
-TBD
diff --git a/etc/glare-paste.ini b/etc/glare-paste.ini
deleted file mode 100644
index 4f6eadb..0000000
--- a/etc/glare-paste.ini
+++ /dev/null
@@ -1,53 +0,0 @@
-# Use this pipeline for trusted auth - DEFAULT
-# Auth token has format user:tenant:roles
-[pipeline:glare-api]
-pipeline = cors faultwrapper healthcheck http_proxy_to_wsgi versionnegotiation osprofiler trustedauth glarev1api
-
-# Use this pipeline for keystone auth
-[pipeline:glare-api-keystone]
-pipeline = cors faultwrapper healthcheck http_proxy_to_wsgi versionnegotiation osprofiler authtoken context glarev1api
-
-# Use this pipeline for Keycloak auth
-[pipeline:glare-api-keycloak]
-pipeline = cors faultwrapper healthcheck http_proxy_to_wsgi versionnegotiation osprofiler keycloak context glarev1api
-
-# Use this pipeline when you want to specify context params manually
-[pipeline:glare-api-noauth]
-pipeline = cors faultwrapper healthcheck http_proxy_to_wsgi versionnegotiation osprofiler context glarev1api
-
-[app:glarev1api]
-paste.app_factory = glare.api.v1.router:API.factory
-
-[filter:healthcheck]
-paste.filter_factory = oslo_middleware:Healthcheck.factory
-backends = disable_by_file
-disable_by_file_path = /etc/glare/healthcheck_disable
-
-[filter:versionnegotiation]
-paste.filter_factory = glare.api.middleware.version_negotiation:GlareVersionNegotiationFilter.factory
-
-[filter:faultwrapper]
-paste.filter_factory = glare.api.middleware.fault:GlareFaultWrapperFilter.factory
-
-[filter:context]
-paste.filter_factory = glare.api.middleware.context:ContextMiddleware.factory
-
-[filter:trustedauth]
-paste.filter_factory = glare.api.middleware.context:TrustedAuthMiddleware.factory
-
-[filter:authtoken]
-paste.filter_factory = keystonemiddleware.auth_token:filter_factory
-delay_auth_decision = true
-
-[filter:keycloak]
-paste.filter_factory = glare.api.middleware.keycloak_auth:KeycloakAuthMiddleware.factory
-
-[filter:osprofiler]
-paste.filter_factory = osprofiler.web:WsgiMiddleware.factory
-
-[filter:cors]
-use = egg:oslo.middleware#cors
-oslo_config_project = glare
-
-[filter:http_proxy_to_wsgi]
-paste.filter_factory = oslo_middleware:HTTPProxyToWSGI.factory
\ No newline at end of file
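
The four pipelines above differ only in their auth filter; CORS, fault wrapping, health check, version negotiation and profiling are shared. As a minimal sketch of how such a pipeline is materialized, assuming the file is installed at /etc/glare/glare-paste.ini (paste.deploy resolves each factory and wraps the app right to left):

    # a sketch, not part of the original tree
    from paste import deploy

    app = deploy.loadapp('config:/etc/glare/glare-paste.ini',
                         name='glare-api-keystone')
    # 'app' is now the glarev1api application wrapped by every filter
    # named in the selected pipeline.
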
diff --git a/etc/glare-swift.conf.sample b/etc/glare-swift.conf.sample
deleted file mode 100644
index c3f7def..0000000
--- a/etc/glare-swift.conf.sample
+++ /dev/null
@@ -1,25 +0,0 @@
-# glare-swift.conf.sample
-#
-# This file is an example config file when
-# multiple swift accounts/backing stores are enabled.
-#
-# Specify the reference name in []
-# For each section, specify the auth_address, user and key.
-#
-# WARNING:
-# * If any of auth_address, user or key is not specified,
-#   the glare's swift store will fail to configure
-
-[ref1]
-user = tenant:user1
-key = key1
-auth_version = 2
-auth_address = http://localhost:5000/v2.0
-
-[ref2]
-user = project_name:user_name2
-key = key2
-user_domain_id = default
-project_domain_id = default
-auth_version = 3
-auth_address = http://localhost:5000/v3
diff --git a/etc/oslo-config-generator/glare.conf b/etc/oslo-config-generator/glare.conf
deleted file mode 100644
index 7d9431f..0000000
--- a/etc/oslo-config-generator/glare.conf
+++ /dev/null
@@ -1,13 +0,0 @@
-[DEFAULT]
-output_file = etc/glare.conf.sample
-namespace = glare
-namespace = glance.store
-namespace = keystonemiddleware.auth_token
-namespace = oslo.concurrency
-namespace = oslo.db
-namespace = oslo.db.concurrency
-namespace = oslo.log
-namespace = oslo.messaging
-namespace = oslo.middleware.cors
-namespace = oslo.middleware.http_proxy_to_wsgi
-namespace = oslo.policy
diff --git a/glare/__init__.py b/glare/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/glare/api/__init__.py b/glare/api/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/glare/api/middleware/__init__.py b/glare/api/middleware/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/glare/api/middleware/context.py b/glare/api/middleware/context.py
deleted file mode 100644
index 8987227..0000000
--- a/glare/api/middleware/context.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Copyright 2011-2016 OpenStack Foundation
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from oslo_config import cfg
-from oslo_context import context
-from oslo_log import log as logging
-from oslo_middleware import base as base_middleware
-from oslo_middleware import request_id
-from oslo_serialization import jsonutils
-
-from glare.common import exception
-from glare.common import policy
-from glare.i18n import _
-
-context_opts = [
-    cfg.BoolOpt('allow_anonymous_access', default=False,
-                help=_('Allow unauthenticated users to access the API with '
-                       'read-only privileges. This only applies when using '
-                       'ContextMiddleware.'))
-]
-
-CONF = cfg.CONF
-CONF.register_opts(context_opts)
-
-LOG = logging.getLogger(__name__)
-
- """ - - def __init__(self, service_catalog=None, **kwargs): - super(RequestContext, self).__init__(**kwargs) - self.service_catalog = service_catalog - # check if user is admin using policy file - if kwargs.get('is_admin') is None: - self.is_admin = policy.check_is_admin(self) - - def to_dict(self): - d = super(RequestContext, self).to_dict() - d.update({ - 'service_catalog': self.service_catalog, - }) - return d - - def to_policy_values(self): - values = super(RequestContext, self).to_policy_values() - values['is_admin'] = self.is_admin - values['read_only'] = self.read_only - return values - - -class BaseContextMiddleware(base_middleware.ConfigurableMiddleware): - @staticmethod - def process_response(resp, request=None): - try: - request_id = resp.request.context.request_id - # For python 3 compatibility need to use bytes type - prefix = b'req-' if isinstance(request_id, bytes) else 'req-' - - if not request_id.startswith(prefix): - request_id = prefix + request_id - - resp.headers['x-openstack-request-id'] = request_id - except AttributeError: - pass - - return resp - - -class ContextMiddleware(BaseContextMiddleware): - @staticmethod - def process_request(req): - """Convert authentication information into a request context. - - Generate a RequestContext object from the available - authentication headers and store on the 'context' attribute - of the req object. - - :param req: wsgi request object that will be given the context object - :raises: webob.exc.HTTPUnauthorized: when value of the - X-Identity-Status header is not - 'Confirmed' and anonymous access - is disallowed - """ - if req.headers.get('X-Identity-Status') == 'Confirmed': - req.context = ContextMiddleware._get_authenticated_context(req) - elif CONF.allow_anonymous_access: - req.context = RequestContext(read_only=True, is_admin=False) - else: - raise exception.Unauthorized() - - @staticmethod - def _get_authenticated_context(req): - headers = req.headers - service_catalog = None - if headers.get('X-Service-Catalog') is not None: - catalog_header = headers.get('X-Service-Catalog') - try: - service_catalog = jsonutils.loads(catalog_header) - except ValueError: - raise exception.GlareException( - _('Invalid service catalog json.')) - kwargs = { - 'service_catalog': service_catalog, - 'request_id': req.environ.get(request_id.ENV_REQUEST_ID), - } - return RequestContext.from_environ(req.environ, **kwargs) - - -class TrustedAuthMiddleware(BaseContextMiddleware): - @staticmethod - def process_request(req): - auth_token = req.headers.get('X-Auth-Token') - if not auth_token: - msg = _("Auth token must be provided") - raise exception.Unauthorized(msg) - try: - user, tenant, roles = auth_token.strip().split(':', 3) - except ValueError: - msg = _("Wrong auth token format. It must be 'user:tenant:roles'") - raise exception.Unauthorized(msg) - if not tenant: - msg = _("Tenant must be specified in auth token. 
" - "Format of the token is 'user:tenant:roles'") - raise exception.Unauthorized(msg) - elif tenant.lower() == 'none': - tenant = None - req.headers['X-Identity-Status'] = 'Nope' - else: - req.headers['X-Identity-Status'] = 'Confirmed' - - req.headers['X-User-Id'] = user - req.headers['X-Tenant-Id'] = tenant - req.headers['X-Roles'] = roles - - if req.headers.get('X-Identity-Status') == 'Confirmed': - kwargs = {'request_id': req.environ.get(request_id.ENV_REQUEST_ID)} - req.context = RequestContext.from_environ(req.environ, **kwargs) - elif CONF.allow_anonymous_access: - req.context = RequestContext(read_only=True, is_admin=False) - else: - raise exception.Unauthorized() diff --git a/glare/api/middleware/fault.py b/glare/api/middleware/fault.py deleted file mode 100644 index 85e605e..0000000 --- a/glare/api/middleware/fault.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A middleware that turns exceptions into parsable string. -Inspired by Cinder's and Heat't faultwrapper. -""" - -import sys -import traceback - -from oslo_config import cfg -from oslo_log import log as logging -from oslo_middleware import base as base_middleware -from oslo_utils import reflection -import six -import webob.dec -import webob.exc - -from glare.common import exception -from glare.common import wsgi - - -LOG = logging.getLogger(__name__) - - -class Fault(object): - - def __init__(self, error): - self.error = error - - @webob.dec.wsgify(RequestClass=wsgi.Request) - def __call__(self, req): - serializer = wsgi.JSONResponseSerializer() - resp = webob.Response(request=req) - default_webob_exc = webob.exc.HTTPInternalServerError() - resp.status_code = self.error.get('code', default_webob_exc.code) - serializer.default(resp, self.error) - return resp - - -class GlareFaultWrapperFilter(base_middleware.ConfigurableMiddleware): - """Replace error body with something the client can parse.""" - error_map = { - 'BadRequest': webob.exc.HTTPBadRequest, - 'Unauthorized': webob.exc.HTTPUnauthorized, - 'Forbidden': webob.exc.HTTPForbidden, - 'NotFound': webob.exc.HTTPNotFound, - 'RequestTimeout': webob.exc.HTTPRequestTimeout, - 'Conflict': webob.exc.HTTPConflict, - 'Gone': webob.exc.HTTPGone, - 'PreconditionFailed': webob.exc.HTTPPreconditionFailed, - 'RequestEntityTooLarge': webob.exc.HTTPRequestEntityTooLarge, - 'UnsupportedMediaType': webob.exc.HTTPUnsupportedMediaType, - 'RequestRangeNotSatisfiable': webob.exc.HTTPRequestRangeNotSatisfiable, - 'Locked': webob.exc.HTTPLocked, - 'FailedDependency': webob.exc.HTTPFailedDependency, - 'NotAcceptable': webob.exc.HTTPNotAcceptable, - 'Exception': webob.exc.HTTPInternalServerError, - } - - def _map_exception_to_error(self, class_exception): - if class_exception.__name__ not in self.error_map: - return self._map_exception_to_error(class_exception.__base__) - - return self.error_map[class_exception.__name__] - - def _error(self, ex): - traceback_marker = 'Traceback (most recent call 
-    def _error(self, ex):
-        traceback_marker = 'Traceback (most recent call last)'
-        webob_exc = None
-
-        ex_type = reflection.get_class_name(ex, fully_qualified=False)
-
-        full_message = six.text_type(ex)
-        if traceback_marker in full_message:
-            message, msg_trace = full_message.split(traceback_marker, 1)
-            message = message.rstrip('\n')
-            msg_trace = traceback_marker + msg_trace
-        else:
-            msg_trace = 'None\n'
-            if sys.exc_info() != (None, None, None):
-                msg_trace = traceback.format_exc()
-            message = full_message
-
-        if isinstance(ex, exception.GlareException):
-            message = six.text_type(ex)
-
-        if not webob_exc:
-            webob_exc = self._map_exception_to_error(ex.__class__)
-
-        error = {
-            'code': webob_exc.code,
-            'title': webob_exc.title,
-            'explanation': webob_exc.explanation,
-            'error': {
-                'message': message,
-                'type': ex_type,
-            }
-        }
-
-        if cfg.CONF.debug:
-            error['error']['traceback'] = msg_trace
-
-        # add microversion headers if this is not an acceptable request
-        if isinstance(ex, exception.InvalidGlobalAPIVersion):
-            error['min_version'] = ex.kwargs['min_ver']
-            error['max_version'] = ex.kwargs['max_ver']
-
-        return error
-
-    @webob.dec.wsgify
-    def __call__(self, req):
-        try:
-            return req.get_response(self.application)
-        except Exception as exc:
-            LOG.exception(exc)
-            return req.get_response(Fault(self._error(exc)))
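
For reference, the fault body a client receives has the shape built in _error above; a sketch with illustrative values for an unsupported-version request:

    fault_body = {
        'code': 406,
        'title': 'Not Acceptable',
        'explanation': 'This resource is not available.',
        'error': {
            'message': 'Version 2.0 is not supported by the API.',
            'type': 'InvalidGlobalAPIVersion',
            # 'traceback' appears here only when debug is enabled
        },
        # present only for InvalidGlobalAPIVersion:
        'min_version': '1.0',
        'max_version': '1.0',
    }
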
diff --git a/glare/api/middleware/keycloak_auth.py b/glare/api/middleware/keycloak_auth.py
deleted file mode 100644
index 455a152..0000000
--- a/glare/api/middleware/keycloak_auth.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Copyright 2017 - Nokia Networks
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import jwt
-import memcache
-from oslo_config import cfg
-from oslo_log import log as logging
-from oslo_middleware import base as base_middleware
-import pprint
-import requests
-import webob.dec
-
-from glare.common import exception
-from glare.i18n import _
-
-LOG = logging.getLogger(__name__)
-
-keycloak_oidc_opts = [
-    cfg.StrOpt(
-        'auth_url',
-        default='http://127.0.0.1:8080/auth',
-        help='Keycloak base url (e.g. https://my.keycloak:8443/auth)'
-    ),
-    cfg.StrOpt(
-        'user_info_endpoint_url',
-        default='/realms/%s/protocol/openid-connect/userinfo',
-        help='Endpoint against which authorization will be performed'
-    ),
-    cfg.BoolOpt(
-        'insecure',
-        default=False,
-        help='If True, SSL/TLS certificate verification is disabled'
-    ),
-    cfg.StrOpt(
-        'memcached_server',
-        default=None,
-        help='Url of memcached server to use for caching'
-    ),
-    cfg.IntOpt(
-        'token_cache_time',
-        default=60,
-        min=0,
-        help='In order to prevent excessive effort spent validating '
-             'tokens, the middleware caches previously-seen tokens '
-             'for a configurable duration (in seconds).'
-    ),
-]
-
-CONF = cfg.CONF
-CONF.register_opts(keycloak_oidc_opts, group="keycloak_oidc")
-
-
-class KeycloakAuthMiddleware(base_middleware.Middleware):
-    def __init__(self, app):
-        super(KeycloakAuthMiddleware, self).__init__(application=app)
-        mcserv_url = CONF.keycloak_oidc.memcached_server
-        self.mcclient = memcache.Client(mcserv_url) if mcserv_url else None
-
-    def authenticate(self, access_token, realm_name):
-        info = None
-        if self.mcclient:
-            info = self.mcclient.get(access_token)
-
-        if info is None and CONF.keycloak_oidc.user_info_endpoint_url:
-            try:
-                resp = requests.get(
-                    CONF.keycloak_oidc.auth_url +
-                    (CONF.keycloak_oidc.user_info_endpoint_url % realm_name),
-                    headers={"Authorization": "Bearer %s" % access_token},
-                    verify=not CONF.keycloak_oidc.insecure
-                )
-            except requests.ConnectionError:
-                msg = _("Can't connect to keycloak server with address '%s'."
-                        ) % CONF.keycloak_oidc.auth_url
-                LOG.error(msg)
-                raise exception.GlareException(message=msg)
-
-            if resp.status_code == 401:
-                raise exception.Unauthorized(message=resp.text)
-            if resp.status_code == 403:
-                raise exception.Forbidden(message=resp.text)
-            elif resp.status_code >= 400:
-                raise exception.GlareException(message=resp.text)
-
-            if self.mcclient:
-                self.mcclient.set(access_token, resp.json(),
-                                  time=CONF.keycloak_oidc.token_cache_time)
-            info = resp.json()
-
-        LOG.debug("HTTP response from OIDC provider: %s",
-                  pprint.pformat(info))
-
-        return info
-
-    @webob.dec.wsgify
-    def __call__(self, request):
-        if 'X-Auth-Token' not in request.headers:
-            msg = _("Auth token must be provided in 'X-Auth-Token' header.")
-            LOG.error(msg)
-            raise exception.Unauthorized()
-        access_token = request.headers.get('X-Auth-Token')
-        try:
-            decoded = jwt.decode(access_token, algorithms=['RS256'],
-                                 verify=False)
-        except Exception:
-            msg = _("Token can't be decoded because of wrong format.")
-            LOG.error(msg)
-            raise exception.Unauthorized()
-
-        # Get user realm from parsed token
-        # Format is "iss": "http://<host>:<port>/auth/realms/<realm_name>"
-        __, __, realm_name = decoded['iss'].strip().rpartition('/realms/')
-
-        # Get roles from parsed token
-        roles = ','.join(decoded['realm_access']['roles']) \
-            if 'realm_access' in decoded else ''
-
-        self.authenticate(access_token, realm_name)
-
-        request.headers["X-Identity-Status"] = "Confirmed"
-        request.headers["X-Project-Id"] = realm_name
-        request.headers["X-Roles"] = roles
-        return request.get_response(self.application)
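
The realm extraction in __call__ above is a plain string partition on the token's issuer claim; a runnable sketch with a hypothetical issuer URL:

    iss = "http://keycloak.example.com:8080/auth/realms/glare_realm"
    __, __, realm_name = iss.rpartition('/realms/')
    assert realm_name == 'glare_realm'
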
-""" - -import microversion_parse -from oslo_log import log as logging -from oslo_middleware import base as base_middleware - - -from glare.api.v1 import api_version_request as api_version -from glare.api import versions as artifacts_versions -from glare.common import exception - -LOG = logging.getLogger(__name__) - - -class GlareVersionNegotiationFilter(base_middleware.ConfigurableMiddleware): - """Middleware that defines API version in request and redirects it - to correct Router. - """ - - SERVICE_TYPE = 'artifact' - MIME_TYPE = 'application/vnd.openstack.artifacts-' - - @staticmethod - def get_version_from_accept(accept_header): - """Try to parse accept header to extract api version. - - :param accept_header: accept header - :return: version string in the request or None if not specified - """ - accept = str(accept_header) - if accept.startswith(GlareVersionNegotiationFilter.MIME_TYPE): - LOG.debug("Using media-type versioning") - return accept[len(GlareVersionNegotiationFilter.MIME_TYPE):] - - return None - - @staticmethod - def process_request(req): - """Process api request: - 1. Define if this is request for available versions or not - 2. If it is not version request check extract version - 3. Validate available version and add version info to request - """ - - args = {'method': req.method, 'path': req.path, 'accept': req.accept} - LOG.debug("Determining version of request: %(method)s %(path)s " - "Accept: %(accept)s", args) - - # determine if this is request for versions - if req.path_info in ('/versions', '/'): - is_multi = req.path_info == '/' - return artifacts_versions.Controller.index( - req, is_multi=is_multi) - - # determine api version from request - req_version = GlareVersionNegotiationFilter.get_version_from_accept( - req.accept) - if req_version is None: - # determine api version from microversion header - LOG.debug("Determine version from microversion header.") - req_version = microversion_parse.get_version( - req.headers, - service_type=GlareVersionNegotiationFilter.SERVICE_TYPE) - - # validate microversions header - req.api_version_request = \ - GlareVersionNegotiationFilter._get_api_version_request( - req_version) - req_version = req.api_version_request.get_string() - - LOG.debug("Matched version: %s", req_version) - LOG.debug('new path %s', req.path_info) - - @staticmethod - def _get_api_version_request(req_version): - """Set API version for request based on the version header string.""" - if req_version is None: - LOG.debug("No API version in request header. 
-    @staticmethod
-    def _get_api_version_request(req_version):
-        """Set API version for request based on the version header string."""
-        if req_version is None:
-            LOG.debug("No API version in request header. "
-                      "Use default version.")
-            cur_ver = api_version.APIVersionRequest.default_version()
-        elif req_version == 'latest':
-            # 'latest' is a special keyword which is equivalent to
-            # requesting the maximum version of the API supported
-            cur_ver = api_version.APIVersionRequest.max_version()
-        else:
-            cur_ver = api_version.APIVersionRequest(req_version)
-
-        # Check that the version requested is within the global
-        # minimum/maximum of supported API versions
-        if not cur_ver.matches(cur_ver.min_version(), cur_ver.max_version()):
-            raise exception.InvalidGlobalAPIVersion(
-                req_ver=cur_ver.get_string(),
-                min_ver=cur_ver.min_version().get_string(),
-                max_ver=cur_ver.max_version().get_string())
-        return cur_ver
-
-    @staticmethod
-    def process_response(response, request=None):
-        if hasattr(response, 'headers'):
-            if hasattr(request, 'api_version_request'):
-                api_header_name = microversion_parse.STANDARD_HEADER
-                response.headers[api_header_name] = (
-                    GlareVersionNegotiationFilter.SERVICE_TYPE + ' ' +
-                    request.api_version_request.get_string())
-                response.headers.add('Vary', api_header_name)
-
-        return response
diff --git a/glare/api/v1/__init__.py b/glare/api/v1/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/glare/api/v1/api_version_request.py b/glare/api/v1/api_version_request.py
deleted file mode 100644
index b32ab75..0000000
--- a/glare/api/v1/api_version_request.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright 2016 Openstack Foundation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import re
-
-from glare.common import exception
-from glare.i18n import _
-
-
-REST_API_VERSION_HISTORY = """REST API Version History:
-
-    * 1.0 - First stable API version that supports microversion. If API
-      version is not specified in the request then API v1.0 is used as
-      default API version.
-"""
-
-
-class APIVersionRequest(object):
-    """This class represents an API Version Request with convenience
-    methods for manipulation and comparison of version
-    numbers that we need to do to implement microversions.
-    """
-
-    _MIN_API_VERSION = "1.0"
-    _MAX_API_VERSION = "1.0"
-    _DEFAULT_API_VERSION = "1.0"
-
-    def __init__(self, version_string):
-        """Create an API version request object.
-
-        :param version_string: String representation of APIVersionRequest.
-            Correct format is 'X.Y', where 'X' and 'Y' are int values.
-        """
-        match = re.match(r"^([1-9]\d*)\.([1-9]\d*|0)$", version_string)
-        if match:
-            self.ver_major = int(match.group(1))
-            self.ver_minor = int(match.group(2))
-        else:
-            msg = _("API version string %s is not valid. "
-                    "Cannot determine API version.") % version_string
-            raise exception.BadRequest(msg)
-
" - "Cannot determine API version.") % version_string - raise exception.BadRequest(msg) - - def __str__(self): - """Debug/Logging representation of object.""" - return ("API Version Request Major: %s, Minor: %s" - % (self.ver_major, self.ver_minor)) - - def _format_type_error(self, other): - return TypeError(_("'%(other)s' should be an instance of '%(cls)s'") % - {"other": other, "cls": self.__class__}) - - def __lt__(self, other): - if not isinstance(other, APIVersionRequest): - raise self._format_type_error(other) - - return ((self.ver_major, self.ver_minor) < - (other.ver_major, other.ver_minor)) - - def __eq__(self, other): - if not isinstance(other, APIVersionRequest): - raise self._format_type_error(other) - - return ((self.ver_major, self.ver_minor) == - (other.ver_major, other.ver_minor)) - - def __gt__(self, other): - if not isinstance(other, APIVersionRequest): - raise self._format_type_error(other) - - return ((self.ver_major, self.ver_minor) > - (other.ver_major, other.ver_minor)) - - def __le__(self, other): - return self < other or self == other - - def __ne__(self, other): - return not self.__eq__(other) - - def __ge__(self, other): - return self > other or self == other - - def matches(self, min_version, max_version): - """Returns whether the version object represents a version - greater than or equal to the minimum version and less than - or equal to the maximum version. - - :param min_version: Minimum acceptable version. - :param max_version: Maximum acceptable version. - :returns: boolean - """ - return min_version <= self <= max_version - - def get_string(self): - """Converts object to string representation which is used to create - an APIVersionRequest object results in the same version request. - """ - return "%s.%s" % (self.ver_major, self.ver_minor) - - @classmethod - def min_version(cls): - """Minimal allowed api version.""" - return APIVersionRequest(cls._MIN_API_VERSION) - - @classmethod - def max_version(cls): - """Maximal allowed api version.""" - return APIVersionRequest(cls._MAX_API_VERSION) - - @classmethod - def default_version(cls): - """Default api version if no version in request.""" - return APIVersionRequest(cls._DEFAULT_API_VERSION) diff --git a/glare/api/v1/api_versioning.py b/glare/api/v1/api_versioning.py deleted file mode 100644 index 25eb92c..0000000 --- a/glare/api/v1/api_versioning.py +++ /dev/null @@ -1,172 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools - -from glare.api.v1 import api_version_request as api_version -from glare.common import exception as exc -from glare.i18n import _ - - -class VersionedMethod(object): - - def __init__(self, name, start_version, end_version, func): - """Versioning information for a single method. 
diff --git a/glare/api/v1/api_versioning.py b/glare/api/v1/api_versioning.py
deleted file mode 100644
index 25eb92c..0000000
--- a/glare/api/v1/api_versioning.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# Copyright (c) 2016 Mirantis, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import functools
-
-from glare.api.v1 import api_version_request as api_version
-from glare.common import exception as exc
-from glare.i18n import _
-
-
-class VersionedMethod(object):
-
-    def __init__(self, name, start_version, end_version, func):
-        """Versioning information for a single method.
-
-        :param name: Name of the method
-        :param start_version: Minimum acceptable version
-        :param end_version: Maximum acceptable version
-        :param func: Method to call
-        """
-        # NOTE(kairat): minimums and maximums are inclusive
-        self.name = name
-        self.start_version = start_version
-        self.end_version = end_version
-        self.func = func
-
-    def __str__(self):
-        return ("Version Method %s: min: %s, max: %s"
-                % (self.name, self.start_version, self.end_version))
-
-
-class VersionedResource(object):
-    """Versioned mixin that provides ability to define versioned methods and
-    return appropriate methods based on user request.
-    """
-
-    # prefix for all versioned methods in class
-    VER_METHODS_ATTR_PREFIX = 'versioned_methods_'
-
-    @staticmethod
-    def check_for_versions_intersection(func_list):
-        """Determines whether function list contains version intervals
-        intersections or not. General algorithm:
-        https://en.wikipedia.org/wiki/Intersection_algorithm
-
-        :param func_list: list of VersionedMethod objects
-        :return: boolean
-        """
-        pairs = []
-        counter = 0
-        for f in func_list:
-            pairs.append((f.start_version, 1, f))
-            pairs.append((f.end_version, -1, f))
-
-        def compare(x):
-            return x[0]
-
-        pairs.sort(key=compare)
-        for p in pairs:
-            counter += p[1]
-            if counter > 1:
-                return True
-        return False
-
-    @classmethod
-    def supported_versions(cls, min_ver, max_ver=None):
-        """Decorator for versioning api methods.
-
-        Add the decorator to any method which takes a request object
-        as the first parameter and belongs to a class which inherits from
-        wsgi.Controller. The implementation is inspired by Nova.
-
-        :param min_ver: string representing minimum version
-        :param max_ver: optional string representing maximum version
-        """
-
-        def decorator(f):
-            obj_min_ver = api_version.APIVersionRequest(min_ver)
-            if max_ver:
-                obj_max_ver = api_version.APIVersionRequest(max_ver)
-            else:
-                obj_max_ver = api_version.APIVersionRequest.max_version()
-
-            # Add to list of versioned methods registered
-            func_name = f.__name__
-            new_func = VersionedMethod(func_name, obj_min_ver, obj_max_ver, f)
-
-            versioned_attr = cls.VER_METHODS_ATTR_PREFIX + cls.__name__
-            func_dict = getattr(cls, versioned_attr, {})
-            if not func_dict:
-                setattr(cls, versioned_attr, func_dict)
-
-            func_list = func_dict.get(func_name, [])
-            if not func_list:
-                func_dict[func_name] = func_list
-            func_list.append(new_func)
-
-            is_intersect = cls.check_for_versions_intersection(
-                func_list)
-
-            if is_intersect:
-                raise exc.ApiVersionsIntersect(
-                    name=new_func.name,
-                    min_ver=new_func.start_version,
-                    max_ver=new_func.end_version,
-                )
-
-            # Ensure the list is sorted by minimum version (reversed)
-            # so later when we work through the list in order we find
-            # the method which has the latest version which supports
-            # the version requested.
-            func_list.sort(key=lambda vf: vf.start_version, reverse=True)
-
-            return f
-
-        return decorator
-
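    # Editorial sketch, not from the original file: registering a versioned
    # method, mirroring the module-level alias used later in resource.py
    # ('supported_versions = api_versioning.VersionedResource.supported_versions').
    supported_versions_alias = VersionedResource.supported_versions

    class SampleController(VersionedResource):  # hypothetical controller

        @supported_versions_alias(min_ver='1.0', max_ver='1.0')
        def show(self, req, artifact_id):
            return {'id': artifact_id}

    # A second 'show' decorated with a later, non-overlapping range would be
    # registered alongside this one; overlapping ranges raise
    # ApiVersionsIntersect at import time.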
-    def __getattribute__(self, key):
-        def version_select(*args, **kwargs):
-            """Look for the method which matches the name supplied and version
-            constraints and calls it with the supplied arguments.
-
-            :returns: Returns the result of the method called
-            :raises: VersionNotFoundForAPIMethod if there is no method which
-                matches the name and version constraints
-            """
-            # versioning is used in 3 classes: request deserializer and
-            # controller have request as first argument
-            # response serializer has response as first argument
-            # we must respect all three cases
-            if hasattr(args[0], 'api_version_request'):
-                ver = args[0].api_version_request
-            elif hasattr(args[0], 'request'):
-                ver = args[0].request.api_version_request
-            else:
-                raise exc.VersionNotFoundForAPIMethod(
-                    message=_("Api version not found in the request."))
-
-            # look the method up in the per-class registry that
-            # supported_versions() filled in
-            func_list = object.__getattribute__(self, attr_name)[key]
-            for func in func_list:
-                if ver.matches(func.start_version, func.end_version):
-                    # Update the version_select wrapper function so
-                    # other decorator attributes like wsgi.response
-                    # are still respected.
-                    functools.update_wrapper(version_select, func.func)
-                    return func.func(self, *args, **kwargs)
-
-            # No version match
-            raise exc.VersionNotFoundForAPIMethod(version=ver)
-
-        class_obj = object.__getattribute__(self, '__class__')
-        prefix = object.__getattribute__(self, 'VER_METHODS_ATTR_PREFIX')
-        attr_name = prefix + object.__getattribute__(class_obj, '__name__')
-        try:
-            if key in object.__getattribute__(self, attr_name):
-                return version_select
-        except AttributeError:
-            # No versioning on this class
-            pass
-
-        return object.__getattribute__(self, key)
diff --git a/glare/api/v1/resource.py b/glare/api/v1/resource.py
deleted file mode 100644
index aa8892e..0000000
--- a/glare/api/v1/resource.py
+++ /dev/null
@@ -1,464 +0,0 @@
-# Copyright (c) 2016 Mirantis, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""WSGI Resource definition for Glare. Defines Glare API and serialization/
-deserialization of incoming requests."""
-
-import json
-import jsonpatch
-from oslo_config import cfg
-from oslo_log import log as logging
-from oslo_utils import encodeutils
-import six
-from six.moves import http_client
-import six.moves.urllib.parse as urlparse
-
-from glare.api.v1 import api_versioning
-from glare.common import exception as exc
-from glare.common import wsgi
-from glare import engine
-from glare.i18n import _
-
-LOG = logging.getLogger(__name__)
-
-CONF = cfg.CONF
-
-list_configs = [
-    cfg.IntOpt('default_api_limit', default=25,
-               help=_('Default value for the number of items returned by a '
-                      'request if not specified explicitly in the request')),
-    cfg.IntOpt('max_api_limit', default=1000,
-               help=_('Maximum permissible number of items that could be '
-                      'returned by a request')),
-]
-
-CONF.register_opts(list_configs)
-
-supported_versions = api_versioning.VersionedResource.supported_versions
-
-
-class RequestDeserializer(api_versioning.VersionedResource,
-                          wsgi.JSONRequestDeserializer):
-    """Glare deserializer for incoming webob requests.
-
-    Deserializer checks and converts incoming requests into a set of Glare
-    primitives, so other service components don't work with requests at all.
-    Deserializer also performs primary API validation without any knowledge
-    about concrete artifact type structure.
-    """
-
-    @staticmethod
-    def _get_content_type(req, expected=None):
-        """Determine content type of the request body."""
-        if "Content-Type" not in req.headers:
-            msg = _("Content-Type must be specified.")
-            LOG.error(msg)
-            raise exc.BadRequest(msg)
-
-        content_type = req.content_type
-        if expected is not None and content_type not in expected:
-            msg = (_('Invalid content type: %(ct)s. Expected: %(exp)s') %
-                   {'ct': content_type, 'exp': ', '.join(expected)})
-            raise exc.UnsupportedMediaType(message=msg)
-
-        return content_type
-
-    def _get_request_body(self, req):
-        """Get request json body and convert it to python structures."""
-        return self.from_json(req.body)
-
-    @supported_versions(min_ver='1.0')
-    def create(self, req):
-        self._get_content_type(req, expected=['application/json'])
-        body = self._get_request_body(req)
-        if not isinstance(body, dict):
-            msg = _("Dictionary expected as body value. Got %s.") % type(body)
-            raise exc.BadRequest(msg)
-        return {'values': body}
-
-    @supported_versions(min_ver='1.0')
-    def list(self, req):
-        params = req.params.copy()
-        marker = params.pop('marker', None)
-        query_params = {}
-        # step 1 - apply marker to query if it exists
-        if marker is not None:
-            query_params['marker'] = marker
-
-        # step 2 - apply limit (if it exists, otherwise set default limit)
-        limit = params.pop('limit', CONF.default_api_limit)
-        try:
-            limit = int(limit)
-        except ValueError:
-            msg = _("Limit param must be an integer.")
-            raise exc.BadRequest(message=msg)
-        if limit < 0:
-            msg = _("Limit param must be non-negative.")
-            raise exc.BadRequest(message=msg)
-        query_params['limit'] = min(CONF.max_api_limit, limit)
-
-        # step 3 - parse sort parameters
-        if 'sort' in params:
-            sort = []
-            for sort_param in params.pop('sort').strip().split(','):
-                key, _sep, direction = sort_param.partition(':')
-                if direction and direction not in ('asc', 'desc'):
-                    raise exc.BadRequest('Sort direction must be one of '
-                                         '["asc", "desc"]. Got %s direction'
-                                         % direction)
-                sort.append((key, direction or 'desc'))
-            query_params['sort'] = sort
-
-        # step 4 - parse filter parameters
-        filters = []
-        for fname, fval in params.items():
-            if fname == 'version' and fval == 'latest':
-                query_params['latest'] = True
-            else:
-                filters.append((fname, fval))
-
-        query_params['filters'] = filters
-        return query_params
-
-    @supported_versions(min_ver='1.0')
-    def update(self, req):
-        self._get_content_type(
-            req, expected=['application/json-patch+json'])
-        body = self._get_request_body(req)
-        patch = jsonpatch.JsonPatch(body)
-        try:
-            # Initially the patch object doesn't validate input. It's only
-            # checked when we call the get operation on each method
-            tuple(map(patch._get_operation, patch.patch))
-        except (jsonpatch.InvalidJsonPatch, TypeError, AttributeError,
-                jsonpatch.JsonPointerException):
-            msg = _("Json Patch body is malformed")
-            raise exc.BadRequest(msg)
-        return {'patch': patch}
-
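    # Editorial sketch, not from the original file: inputs accepted by the
    # deserializer methods above (all values illustrative).
    #
    # list():
    #   GET /artifacts/images?limit=20&sort=version:desc&name=ubuntu
    #   -> {'limit': 20, 'sort': [('version', 'desc')],
    #       'filters': [('name', 'ubuntu')]}
    #
    # update(): a body sent with Content-Type: application/json-patch+json
    example_patch_body = [
        {"op": "replace", "path": "/description", "value": "updated"},
        {"op": "add", "path": "/tags/-", "value": "prod"},
    ]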
MD5 must be specified " - "for external location in artifact blob.") - raise exc.BadRequest(msg) - else: - data = req.body_file - - if self.is_valid_encoding(req) and self.is_valid_method(req): - req.is_body_readable = True - - return {'data': data, 'content_type': content_type} - - -def log_request_progress(f): - def log_decorator(self, req, *args, **kwargs): - LOG.debug("Request %(request_id)s for %(api_method)s successfully " - "deserialized. Pass request parameters to Engine", - {'request_id': req.context.request_id, - 'api_method': f.__name__}) - result = f(self, req, *args, **kwargs) - LOG.info( - "Request %(request_id)s for artifact %(api_method)s " - "successfully executed.", {'request_id': req.context.request_id, - 'api_method': f.__name__}) - return result - return log_decorator - - -class ArtifactsController(api_versioning.VersionedResource): - """API controller for Glare Artifacts. - - Artifact Controller prepares incoming data for Glare Engine and redirects - data to the appropriate engine method. Once the response data is returned - from the engine Controller passes it next to Response Serializer. - """ - - def __init__(self): - self.engine = engine.Engine() - - @supported_versions(min_ver='1.0') - @log_request_progress - def list_type_schemas(self, req): - """List of detailed descriptions of enabled artifact types. - - :param req: user request - :return: list of json-schemas of all enabled artifact types. - """ - return self.engine.show_type_schemas(req.context) - - @supported_versions(min_ver='1.0') - @log_request_progress - def show_type_schema(self, req, type_name): - """Get detailed artifact type description. - - :param req: user request - :param type_name: artifact type name - :return: json-schema representation of artifact type - """ - type_schema = self.engine.show_type_schemas(req.context, type_name) - return {type_name: type_schema} - - @supported_versions(min_ver='1.0') - @log_request_progress - def create(self, req, type_name, values): - """Create artifact record in Glare. - - :param req: user request - :param type_name: artifact type name - :param values: dict with artifact fields - :return: definition of created artifact - """ - if req.context.tenant is None or req.context.read_only: - msg = _("It's forbidden to anonymous users to create artifacts.") - raise exc.Forbidden(msg) - if not values.get('name'): - msg = _("Name must be specified at creation.") - raise exc.BadRequest(msg) - for field in ('visibility', 'status'): - if field in values: - msg = _("%s is not allowed in a request at creation.") % field - raise exc.BadRequest(msg) - return self.engine.create(req.context, type_name, values) - - @supported_versions(min_ver='1.0') - @log_request_progress - def update(self, req, type_name, artifact_id, patch): - """Update artifact record in Glare. - - :param req: User request - :param type_name: Artifact type name - :param artifact_id: id of artifact to update - :param patch: json patch with artifact changes - :return: definition of updated artifact - """ - return self.engine.save(req.context, type_name, artifact_id, patch) - - @supported_versions(min_ver='1.0') - @log_request_progress - def delete(self, req, type_name, artifact_id): - """Delete artifact from Glare. 
-
-        :param req: User request
-        :param type_name: Artifact type name
-        :param artifact_id: id of artifact to delete
-        """
-        return self.engine.delete(req.context, type_name, artifact_id)
-
-    @supported_versions(min_ver='1.0')
-    @log_request_progress
-    def show(self, req, type_name, artifact_id):
-        """Show detailed artifact info.
-
-        :param req: User request
-        :param type_name: Artifact type name
-        :param artifact_id: id of artifact to show
-        :return: definition of requested artifact
-        """
-        return self.engine.show(req.context, type_name, artifact_id)
-
-    @supported_versions(min_ver='1.0')
-    @log_request_progress
-    def list(self, req, type_name, filters=None, marker=None, limit=None,
-             sort=None, latest=False):
-        """List available artifacts.
-
-        :param req: User request
-        :param type_name: Artifact type name
-        :param filters: filters to apply to the artifact list
-        :param marker: the artifact that marks the beginning of the list;
-         all artifacts up to and including the marker are excluded from
-         the result
-        :param limit: maximum number of items in the list
-        :param sort: sorting options
-        :param latest: flag indicating that only the highest versions of
-         artifacts should be returned in the output
-        :return: list of requested artifact definitions
-        """
-        artifacts = self.engine.list(req.context, type_name, filters, marker,
-                                     limit, sort, latest)
-        result = {'artifacts': artifacts,
-                  'type_name': type_name}
-        if len(artifacts) != 0 and len(artifacts) == limit:
-            result['next_marker'] = artifacts[-1]['id']
-        return result
-
-    @supported_versions(min_ver='1.0')
-    @log_request_progress
-    def upload_blob(self, req, type_name, artifact_id, blob_path, data,
-                    content_type):
-        """Upload blob into Glare repo.
-
-        :param req: User request
-        :param type_name: Artifact type name
-        :param artifact_id: id of the artifact to upload the blob to
-        :param blob_path: path to artifact blob
-        :param data: blob payload
-        :param content_type: data content-type
-        :return: definition of requested artifact with uploaded blob
-        """
-        field_name, _sep, blob_key = blob_path.partition('/')
-        if not blob_key:
-            blob_key = None
-        if content_type == ('application/vnd+openstack.glare-custom-location'
-                            '+json'):
-            url = data.pop('url')
-            return self.engine.add_blob_location(
-                req.context, type_name, artifact_id, field_name, url, data,
-                blob_key)
-        else:
-            return self.engine.upload_blob(
-                req.context, type_name, artifact_id, field_name, data,
-                content_type, blob_key)
-
-    @supported_versions(min_ver='1.0')
-    @log_request_progress
-    def download_blob(self, req, type_name, artifact_id, blob_path):
-        """Download blob data from Artifact.
-
-        :param req: User request
-        :param type_name: artifact type name
-        :param artifact_id: id of the artifact to download the blob from
-        :param blob_path: path to artifact blob
-        :return: requested blob data
-        """
-        field_name, _sep, blob_key = blob_path.partition('/')
-        if not blob_key:
-            blob_key = None
-        data, meta = self.engine.download_blob(
-            req.context, type_name, artifact_id, field_name, blob_key)
-        result = {'data': data, 'meta': meta}
-        return result
-
-
-class ResponseSerializer(api_versioning.VersionedResource,
-                         wsgi.JSONResponseSerializer):
-    """Glare serializer for outgoing responses.
-
-    Converts data received from the engine to WSGI responses. It also
-    specifies proper response status and content type as declared in the API.
- """ - - @staticmethod - def _prepare_json_response(response, result, - content_type='application/json'): - body = json.dumps(result, ensure_ascii=False) - response.text = six.text_type(body) - response.content_type = content_type + '; charset=UTF-8' - - def list_type_schemas(self, response, type_schemas): - self._prepare_json_response(response, - {'schemas': type_schemas}, - content_type='application/schema+json') - - def show_type_schema(self, response, type_schema): - self._prepare_json_response(response, - {'schemas': type_schema}, - content_type='application/schema+json') - - @supported_versions(min_ver='1.0') - def list_schemas(self, response, type_list): - self._prepare_json_response(response, {'types': type_list}) - - @supported_versions(min_ver='1.0') - def create(self, response, artifact): - self._prepare_json_response(response, artifact) - response.status_int = http_client.CREATED - - @supported_versions(min_ver='1.0') - def show(self, response, artifact): - self._prepare_json_response(response, artifact) - - @supported_versions(min_ver='1.0') - def update(self, response, artifact): - self._prepare_json_response(response, artifact) - - @supported_versions(min_ver='1.0') - def list(self, response, af_list): - params = dict(response.request.params) - params.pop('marker', None) - - encode_params = {} - for key, value in params.items(): - encode_params[key] = encodeutils.safe_encode(value) - query = urlparse.urlencode(encode_params) - - type_name = af_list['type_name'] - body = { - type_name: af_list['artifacts'], - 'first': '/artifacts/%s' % type_name, - 'schema': '/schemas/%s' % type_name, - } - if query: - body['first'] = '%s?%s' % (body['first'], query) - if 'next_marker' in af_list: - params['marker'] = af_list['next_marker'] - next_query = urlparse.urlencode(params) - body['next'] = '/artifacts/%s?%s' % (type_name, next_query) - response.unicode_body = six.text_type(json.dumps(body, - ensure_ascii=False)) - response.content_type = 'application/json' - - @supported_versions(min_ver='1.0') - def delete(self, response, result): - response.status_int = http_client.NO_CONTENT - - @supported_versions(min_ver='1.0') - def upload_blob(self, response, artifact): - self._prepare_json_response(response, artifact) - - @staticmethod - def _serialize_blob(response, result): - data, meta = result['data'], result['meta'] - response.app_iter = iter(data) - response.headers['Content-Type'] = meta['content_type'] - response.headers['Content-MD5'] = meta['md5'] - response.headers['X-Openstack-Glare-Content-SHA1'] = meta['sha1'] - response.headers['X-Openstack-Glare-Content-SHA256'] = meta['sha256'] - response.content_length = str(meta['size']) - - @staticmethod - def _serialize_location(response, result): - data, meta = result['data'], result['meta'] - response.headers['Content-MD5'] = meta['md5'] - response.headers['X-Openstack-Glare-Content-SHA1'] = meta['sha1'] - response.headers['X-Openstack-Glare-Content-SHA256'] = meta['sha256'] - response.location = data['url'] - response.content_type = 'application/json' - response.status = http_client.MOVED_PERMANENTLY - response.content_length = 0 - - @supported_versions(min_ver='1.0') - def download_blob(self, response, result): - external = result['meta']['external'] - if external: - self._serialize_location(response, result) - else: - self._serialize_blob(response, result) - - -def create_resource(): - """Artifact resource factory method.""" - deserializer = RequestDeserializer() - serializer = ResponseSerializer() - controller = 
ArtifactsController() - return wsgi.Resource(controller, deserializer, serializer) diff --git a/glare/api/v1/router.py b/glare/api/v1/router.py deleted file mode 100644 index d7ed798..0000000 --- a/glare/api/v1/router.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from glare.api.v1 import resource -from glare.common import wsgi - - -class API(wsgi.Router): - """WSGI router for Glare v1 API requests. - - API Router redirects incoming requests to appropriate WSGI resource method. - """ - - def __init__(self, mapper): - - glare_resource = resource.create_resource() - reject_method_resource = wsgi.Resource(wsgi.RejectMethodController()) - - # ---schemas--- - mapper.connect('/schemas', - controller=glare_resource, - action='list_type_schemas', - conditions={'method': ['GET']}, - body_reject=True) - mapper.connect('/schemas', - controller=reject_method_resource, - action='reject', - allowed_methods='GET') - - mapper.connect('/schemas/{type_name}', - controller=glare_resource, - action='show_type_schema', - conditions={'method': ['GET']}, - body_reject=True) - mapper.connect('/schemas/{type_name}', - controller=reject_method_resource, - action='reject', - allowed_methods='GET') - - # ---artifacts--- - mapper.connect('/artifacts/{type_name}', - controller=glare_resource, - action='list', - conditions={'method': ['GET']}, - body_reject=True) - mapper.connect('/artifacts/{type_name}', - controller=glare_resource, - action='create', - conditions={'method': ['POST']}) - mapper.connect('/artifacts/{type_name}', - controller=reject_method_resource, - action='reject', - allowed_methods='GET, POST') - - mapper.connect('/artifacts/{type_name}/{artifact_id}', - controller=glare_resource, - action='update', - conditions={'method': ['PATCH']}) - mapper.connect('/artifacts/{type_name}/{artifact_id}', - controller=glare_resource, - action='show', - conditions={'method': ['GET']}, - body_reject=True) - mapper.connect('/artifacts/{type_name}/{artifact_id}', - controller=glare_resource, - action='delete', - conditions={'method': ['DELETE']}, - body_reject=True) - mapper.connect('/artifacts/{type_name}/{artifact_id}', - controller=reject_method_resource, - action='reject', - allowed_methods='GET, PATCH, DELETE') - - # ---blobs--- - mapper.connect('/artifacts/{type_name}/{artifact_id}/{blob_path:.*?}', - controller=glare_resource, - action='download_blob', - conditions={'method': ['GET']}, - body_reject=True) - mapper.connect('/artifacts/{type_name}/{artifact_id}/{blob_path:.*?}', - controller=glare_resource, - action='upload_blob', - conditions={'method': ['PUT']}) - mapper.connect('/artifacts/{type_name}/{artifact_id}/{blob_path:.*?}', - controller=reject_method_resource, - action='reject', - allowed_methods='GET, PUT') - - super(API, self).__init__(mapper) diff --git a/glare/api/versions.py b/glare/api/versions.py deleted file mode 100644 index de24aef..0000000 --- a/glare/api/versions.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright 2012 
OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg -from oslo_serialization import jsonutils -from six.moves import http_client -import webob.dec - -from glare.api.v1 import api_version_request -from glare.i18n import _ - - -versions_opts = [ - cfg.StrOpt('public_endpoint', - help=_(""" -Public url endpoint to use for Glare versions response. - -This is the public url endpoint that will appear in the Glare -"versions" response. If no value is specified, the endpoint that is -displayed in the version's response is that of the host running the -API service. Change the endpoint to represent the proxy URL if the -API service is running behind a proxy. If the service is running -behind a load balancer, add the load balancer's URL for this value. - -Services which consume this: - * glare - -Possible values: - * None - * Proxy URL - * Load balancer URL - -Related options: - * None - -""")), -] - - -CONF = cfg.CONF -CONF.register_opts(versions_opts) - - -class Controller(object): - - """A controller that reports which API versions are supported.""" - - @staticmethod - def index(req, is_multi): - """Respond to a request for all OpenStack API versions. - - :param is_multi: defines if multiple choices should be response status - or not - :param req: user request object - :return: list of supported API versions - """ - def build_version_object(max_version, min_version, status, path=None): - url = CONF.public_endpoint or req.host_url - return { - 'id': 'v%s' % max_version, - 'links': [ - { - 'rel': 'self', - 'href': '%s/%s/' % (url, path) if path else - '%s/' % url, - }, - ], - 'status': status, - 'min_version': min_version, - 'version': max_version - } - - microv_max = api_version_request.APIVersionRequest.max_version() - microv_min = api_version_request.APIVersionRequest.min_version() - version_objs = [build_version_object(microv_max.get_string(), - microv_min.get_string(), - 'EXPERIMENTAL')] - return_status = (http_client.MULTIPLE_CHOICES if is_multi else - http_client.OK) - response = webob.Response(request=req, - status=return_status, - content_type='application/json') - response.body = jsonutils.dump_as_bytes(dict(versions=version_objs)) - return response diff --git a/glare/cmd/__init__.py b/glare/cmd/__init__.py deleted file mode 100644 index abf9d88..0000000 --- a/glare/cmd/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
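# For reference, the versions controller above responds with a body of the
# form below; the endpoint URL and version numbers are illustrative only,
# since the real values come from api_version_request and the deployment:
#
#     {"versions": [{"id": "v1.1",
#                    "links": [{"rel": "self",
#                               "href": "http://localhost:9494/"}],
#                    "status": "EXPERIMENTAL",
#                    "min_version": "1.0",
#                    "version": "1.1"}]}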
- -import os -import sys - -import oslo_utils.strutils as strutils - -from glare import i18n - -try: - import dns # noqa -except ImportError: - dnspython_installed = False -else: - dnspython_installed = True - - -def fix_greendns_ipv6(): - if dnspython_installed: - # All of this is because if dnspython is present in your environment - # then eventlet monkeypatches socket.getaddrinfo() with an - # implementation which doesn't work for IPv6. What we're checking here - # is that the magic environment variable was set when the import - # happened. - nogreendns = 'EVENTLET_NO_GREENDNS' - flag = os.environ.get(nogreendns, '') - if 'eventlet' in sys.modules and not strutils.bool_from_string(flag): - msg = i18n._("It appears that the eventlet module has been " - "imported prior to setting %s='yes'. It is currently " - "necessary to disable eventlet.greendns " - "if using ipv6 since eventlet.greendns currently " - "breaks with ipv6 addresses. Please ensure that " - "eventlet is not imported prior to this being set.") - raise ImportError(msg % nogreendns) - - os.environ[nogreendns] = 'yes' - - -i18n.enable_lazy() -fix_greendns_ipv6() diff --git a/glare/cmd/api.py b/glare/cmd/api.py deleted file mode 100755 index cf30b79..0000000 --- a/glare/cmd/api.py +++ /dev/null @@ -1,90 +0,0 @@ -#!/usr/bin/env python -# -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -""" -Glare (Glare Artifact Repository) API service. -""" - -import os -import sys - -import eventlet -from oslo_utils import encodeutils - -eventlet.patcher.monkey_patch(all=False, socket=True, time=True, - select=True, thread=True, os=True) - -# If ../glare/__init__.py exists, add ../ to Python search path, so that -# it will override what happens to be installed in /usr/(local/)lib/python... 
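# Note on the startup code above: glare/cmd/__init__.py runs
# fix_greendns_ipv6() at package import time, i.e. before cmd/api.py gets to
# "import eventlet", so EVENTLET_NO_GREENDNS=yes is already in the
# environment when eventlet initializes. An equivalent manual workaround
# (assuming a shell deployment; not taken from this tree) is:
#
#     export EVENTLET_NO_GREENDNS=yes
#     glare-api --config-file /etc/glare/glare.conf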
-possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), - os.pardir, - os.pardir)) -if os.path.exists(os.path.join(possible_topdir, 'glare', '__init__.py')): - sys.path.insert(0, possible_topdir) - -import glance_store -from oslo_config import cfg -from oslo_log import log as logging -from osprofiler import initializer - -from glare.common import config -from glare.common import exception -from glare.common import wsgi -from glare import notification - - -CONF = cfg.CONF -CONF.import_group("profiler", "glare.common.wsgi") -logging.register_options(CONF) - -KNOWN_EXCEPTIONS = (RuntimeError, - exception.WorkerCreationFailure, - glance_store.exceptions.BadStoreConfiguration) - - -def fail(e): - global KNOWN_EXCEPTIONS - return_code = KNOWN_EXCEPTIONS.index(type(e)) + 1 - sys.stderr.write("ERROR: %s\n" % encodeutils.exception_to_unicode(e)) - sys.exit(return_code) - - -def main(): - try: - config.parse_args() - wsgi.set_eventlet_hub() - logging.setup(CONF, 'glare') - notification.set_defaults() - - if CONF.profiler.enabled: - initializer.init_from_conf( - conf=CONF, - context={}, - project="glare", - service="api", - host=CONF.bind_host - ) - - server = wsgi.Server(initialize_glance_store=True) - server.start(config.load_paste_app('glare-api'), default_port=9494) - server.wait() - except KNOWN_EXCEPTIONS as e: - fail(e) - - -if __name__ == '__main__': - main() diff --git a/glare/cmd/db_manage.py b/glare/cmd/db_manage.py deleted file mode 100755 index f8d3857..0000000 --- a/glare/cmd/db_manage.py +++ /dev/null @@ -1,80 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
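# Note on the API entry point above: fail() maps each exception type to a
# process exit code via its position in the KNOWN_EXCEPTIONS tuple:
#
#     RuntimeError                                  -> exit code 1
#     exception.WorkerCreationFailure               -> exit code 2
#     glance_store.exceptions.BadStoreConfiguration -> exit code 3
#
# The codes follow tuple order, so reordering KNOWN_EXCEPTIONS changes the
# exit codes that wrapper scripts observe.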
- -from oslo_config import cfg -from oslo_db import options - -from glare.db.migration import migration - -CONF = cfg.CONF -options.set_defaults(CONF) - - -class DBCommand(object): - - def upgrade(self, config): - migration.upgrade(CONF.command.revision, config=config) - - def downgrade(self, config): - migration.downgrade(CONF.command.revision, config=config) - - def revision(self, config): - migration.revision(CONF.command.message, - CONF.command.autogenerate, - config=config) - - def stamp(self, config): - migration.stamp(CONF.command.revision, config=config) - - def version(self, config): - print(migration.version()) - - -def add_command_parsers(subparsers): - command_object = DBCommand() - - parser = subparsers.add_parser('upgrade') - parser.set_defaults(func=command_object.upgrade) - parser.add_argument('--revision', nargs='?') - - parser = subparsers.add_parser('downgrade') - parser.set_defaults(func=command_object.downgrade) - parser.add_argument('--revision', nargs='?') - - parser = subparsers.add_parser('stamp') - parser.add_argument('--revision', nargs='?') - parser.set_defaults(func=command_object.stamp) - - parser = subparsers.add_parser('revision') - parser.add_argument('-m', '--message') - parser.add_argument('--autogenerate', action='store_true') - parser.set_defaults(func=command_object.revision) - - parser = subparsers.add_parser('version') - parser.set_defaults(func=command_object.version) - - -command_opt = cfg.SubCommandOpt('command', - title='Command', - help='Available commands', - handler=add_command_parsers) - -CONF.register_cli_opt(command_opt) - - -def main(): - config = migration.get_alembic_config() - CONF(project='glare') - CONF.command.func(config) - -if __name__ == '__main__': - main() diff --git a/glare/cmd/scrubber.py b/glare/cmd/scrubber.py deleted file mode 100644 index 693ecfc..0000000 --- a/glare/cmd/scrubber.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Glare Scrub Service -""" - -import os -import sys - -# If ../glare/__init__.py exists, add ../ to Python search path, so that -# it will override what happens to be installed in /usr/(local/)lib/python... 
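# The db_manage entry point above dispatches CONF.command.func(config) to
# the matching DBCommand method, so typical invocations look like the
# following (the glare-db-manage script name is an assumption and depends on
# how the console entry point is installed):
#
#     glare-db-manage upgrade --revision head
#     glare-db-manage revision -m "add new table" --autogenerate
#     glare-db-manage version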
-possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), - os.pardir, - os.pardir)) -if os.path.exists(os.path.join(possible_topdir, 'glare', '__init__.py')): - sys.path.insert(0, possible_topdir) -import eventlet - -import glance_store -from oslo_config import cfg -from oslo_log import log as logging - -from glare.common import config -from glare import scrubber - -eventlet.patcher.monkey_patch(all=False, socket=True, time=True, select=True, - thread=True, os=True) - -CONF = cfg.CONF -logging.register_options(CONF) -CONF.set_default(name='use_stderr', default=True) - - -def main(): - CONF.register_cli_opts(scrubber.scrubber_cmd_cli_opts, group='scrubber') - CONF.register_opts(scrubber.scrubber_cmd_opts, group='scrubber') - - try: - config.parse_args() - logging.setup(CONF, 'glare') - - glance_store.register_opts(config.CONF) - glance_store.create_stores(config.CONF) - glance_store.verify_default_store() - - app = scrubber.Scrubber() - - if CONF.scrubber.daemon: - server = scrubber.Daemon(CONF.scrubber.wakeup_time) - server.start(app) - server.wait() - else: - app.run() - except RuntimeError as e: - sys.exit("ERROR: %s" % e) - - -if __name__ == '__main__': - main() diff --git a/glare/common/__init__.py b/glare/common/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare/common/config.py b/glare/common/config.py deleted file mode 100644 index bdfc9e6..0000000 --- a/glare/common/config.py +++ /dev/null @@ -1,155 +0,0 @@ -# Copyright 2011 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Routines for configuring Glare. -""" - -import logging.config -import logging.handlers -import os - -from oslo_config import cfg -from oslo_log import log as logging -from oslo_policy import policy -from paste import deploy - -from glare.i18n import _ - -paste_deploy_opts = [ - cfg.StrOpt('flavor', - help=_('Partial name of a pipeline in your paste configuration ' - 'file with the service name removed. For example, if ' - 'your paste section name is ' - '[pipeline:glare-keystone] use the value ' - '"keystone"')), - cfg.StrOpt('config_file', - help=_('Name of the paste configuration file.')), -] - -common_opts = [ - cfg.StrOpt('digest_algorithm', - default='sha256', - help=_(""" -Digest algorithm to use for digital signature. - -Provide a string value representing the digest algorithm to -use for generating digital signatures. By default, ``sha256`` -is used. - -To get a list of the available algorithms supported by the version -of OpenSSL on your platform, run the command: -``openssl list-message-digest-algorithms``. -Examples are 'sha1', 'sha256', and 'sha512'. 
- -Possible values: - * An OpenSSL message digest algorithm identifier - -Relation options: - * None - -""")), -] - -CONF = cfg.CONF -CONF.register_opts(paste_deploy_opts, group='paste_deploy') -CONF.register_opts(common_opts) -policy.Enforcer(CONF) - - -def parse_args(args=None, usage=None, default_config_files=None): - CONF(args=args, - project='glare', - usage=usage, - default_config_files=default_config_files) - - -def _get_deployment_flavor(flavor=None): - """Retrieve the paste_deploy.flavor config item, formatted appropriately - for appending to the application name. - - :param flavor: if specified, use this setting rather than the - paste_deploy.flavor configuration setting - """ - if not flavor: - flavor = CONF.paste_deploy.flavor - return '' if not flavor else ('-' + flavor) - - -def _get_paste_config_path(): - paste_suffix = '-paste.ini' - conf_suffix = '.conf' - if CONF.config_file: - # Assume paste config is in a paste.ini file corresponding - # to the last config file - path = CONF.config_file[-1].replace(conf_suffix, paste_suffix) - else: - path = CONF.prog + paste_suffix - return CONF.find_file(os.path.basename(path)) - - -def _get_deployment_config_file(): - """Retrieve the deployment_config_file config item, formatted as an - absolute pathname. - """ - path = CONF.paste_deploy.config_file - if not path: - path = _get_paste_config_path() - if not path: - msg = _("Unable to locate paste config file for %s.") % CONF.prog - raise RuntimeError(msg) - return os.path.abspath(path) - - -def load_paste_app(app_name, flavor=None, conf_file=None): - """Builds and returns a WSGI app from a paste config file. - - We assume the last config file specified in the supplied ConfigOpts - object is the paste config file, if conf_file is None. - - :param app_name: name of the application to load - :param flavor: name of the variant of the application to load - :param conf_file: path to the paste config file - - :raises: RuntimeError when config file cannot be located or application - cannot be loaded from config file - """ - # append the deployment flavor to the application name, - # in order to identify the appropriate paste pipeline - app_name += _get_deployment_flavor(flavor) - - if not conf_file: - conf_file = _get_deployment_config_file() - - logger = logging.getLogger(__name__) - try: - logger.debug("Loading %(app_name)s from %(conf_file)s", - {'conf_file': conf_file, 'app_name': app_name}) - - app = deploy.loadapp("config:%s" % conf_file, name=app_name) - - # Log the options used when starting if we're in debug mode... - if CONF.debug: - CONF.log_opt_values(logger, logging.DEBUG) - - return app - except (LookupError, ImportError) as e: - msg = (_("Unable to load %(app_name)s from " - "configuration file %(conf_file)s." - "\nGot: %(e)r") % {'app_name': app_name, - 'conf_file': conf_file, - 'e': e}) - logger.error(msg) - raise RuntimeError(msg) diff --git a/glare/common/exception.py b/glare/common/exception.py deleted file mode 100644 index aec6ae0..0000000 --- a/glare/common/exception.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-from oslo_log import log as logging
-import six
-
-from glare.i18n import _
-
-LOG = logging.getLogger(__name__)
-
-
-class GlareException(Exception):
-    """Base Glare Exception class.
-
-    To correctly use this class, inherit from it and define
-    a 'message' property. That message will get printf'd
-    with the keyword arguments provided to the constructor.
-    """
-    message = _("An unknown exception occurred")
-
-    def __init__(self, message=None, **kwargs):
-        if message:
-            self.message = message
-        self.kwargs = kwargs
-        if self.kwargs:
-            self.message = self.message % kwargs
-        LOG.error(self.message)
-        super(GlareException, self).__init__(self.message)
-
-    def __unicode__(self):
-        return six.text_type(self.message)
-
-
-class BadRequest(GlareException):
-    message = _("Bad request")
-
-
-class InvalidParameterValue(BadRequest):
-    message = _("Invalid filter value ")
-
-
-class InvalidFilterOperatorValue(BadRequest):
-    message = _("Unable to filter by unknown operator.")
-
-
-class InvalidVersion(GlareException):
-    message = _("Provided version is invalid")
-
-
-class NotAcceptable(GlareException):
-    message = _("Not acceptable")
-
-
-class InvalidGlobalAPIVersion(NotAcceptable):
-    message = _("Version %(req_ver)s is not supported by the API. Minimum "
-                "is %(min_ver)s and maximum is %(max_ver)s.")
-
-
-class VersionNotFoundForAPIMethod(GlareException):
-    message = _("API version %(version)s is not supported on this method.")
-
-
-class ApiVersionsIntersect(GlareException):
-    message = _("Version of %(name)s %(min_ver)s %(max_ver)s intersects "
-                "with other versions.")
-
-
-class Unauthorized(GlareException):
-    message = _('You are not authenticated')
-
-
-class Forbidden(GlareException):
-    message = _("You are not authorized to complete this action.")
-
-
-class PolicyException(Forbidden):
-    message = _("Policy check for %(policy_name)s "
-                "failed with user credentials.")
-
-
-class NotFound(GlareException):
-    message = _("An object with the specified identifier was not found.")
-
-
-class TypeNotFound(NotFound):
-    message = _("Glare type with name '%(name)s' was not found.")
-
-
-class IncorrectArtifactType(GlareException):
-    message = _("Artifact type is incorrect: %(explanation)s")
-
-
-class ArtifactNotFound(NotFound):
-    message = _("Artifact with type name '%(type_name)s' and id '%(id)s' was "
-                "not found.")
-
-
-class RequestTimeout(GlareException):
-    message = _("The client did not produce a request within the time "
-                "that the server was prepared to wait.")
-
-
-class Conflict(GlareException):
-    message = _("The request could not be completed due to a conflict "
-                "with the current state of the resource.")
-
-
-class Gone(GlareException):
-    message = _("The requested resource is no longer available at the "
-                "server and no forwarding address is known.")
-
-
-class PreconditionFailed(GlareException):
-    message = _("The precondition given in one or more of the request-header "
-                "fields evaluated to false when it was tested on the server.")
-
-
-class RequestEntityTooLarge(GlareException):
-    message = _("The server is refusing to process a request because the "
-                "request entity is larger than the server is willing or "
-                "able to process.")
-
-
-class RequestRangeNotSatisfiable(GlareException):
-    message = _("The request included a Range request-header field, and none "
-                "of the range-specifier values in this field overlap the "
-                "current extent of the selected resource, and the request "
-                "did not include an If-Range request-header field.")
-
-
-class Locked(GlareException):
-    message = _('The resource is locked.')
-
-
-class FailedDependency(GlareException):
-    message = _('The method could not be performed because the requested '
-                'action depended on another action and that action failed.')
-
-
-class UnsupportedMediaType(GlareException):
-    message = _("Unsupported media type.")
-
-
-class SIGHUPInterrupt(GlareException):
-    message = _("System SIGHUP signal received.")
-
-
-class WorkerCreationFailure(GlareException):
-    message = _("Server worker creation failed: %(reason)s.")
-
-
-class DBNotAllowed(GlareException):
-    message = _('This operation is not allowed with the current DB')
diff --git a/glare/common/policy.py b/glare/common/policy.py
deleted file mode 100644
index fabdd58..0000000
--- a/glare/common/policy.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Copyright 2011-2016 OpenStack Foundation
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
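# Note on the exception hierarchy above: GlareException interpolates the
# constructor kwargs into the class-level message, so subclasses are raised
# with named parameters, e.g. (illustrative usage, not code from this tree):
#
#     raise ArtifactNotFound(type_name='images', id='42')
#     # -> logs and raises "Artifact with type name 'images' and id '42'
#     #    was not found."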
- -"""Glare policy operations inspired by Nova implementation.""" - -from oslo_config import cfg -from oslo_log import log as logging -from oslo_policy import policy - -from glare.common import exception - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - -_ENFORCER = None - - -artifact_policy_rules = [ - policy.RuleDefault('context_is_admin', 'role:admin'), - policy.RuleDefault('admin_or_owner', - 'is_admin:True or project_id:%(owner)s'), - policy.RuleDefault("artifact:type_list", "", - "Policy to request list of artifact types"), - policy.RuleDefault("artifact:create", "", "Policy to create artifact."), - policy.RuleDefault("artifact:update_public", - "'public':%(visibility)s and rule:context_is_admin " - "or not 'public':%(visibility)s", - "Policy to update public artifact"), - policy.RuleDefault("artifact:update", "rule:admin_or_owner and " - "rule:artifact:update_public", - "Policy to update artifact"), - policy.RuleDefault("artifact:activate", "rule:admin_or_owner", - "Policy to activate artifact"), - policy.RuleDefault("artifact:reactivate", "rule:context_is_admin", - "Policy to reactivate artifact"), - policy.RuleDefault("artifact:deactivate", "rule:context_is_admin", - "Policy to update artifact"), - policy.RuleDefault("artifact:publish", "rule:context_is_admin", - "Policy to publish artifact"), - policy.RuleDefault("artifact:get", "", - "Policy to get artifact definition"), - policy.RuleDefault("artifact:list", "", - "Policy to list artifacts"), - policy.RuleDefault("artifact:delete_public", - "'public':%(visibility)s and rule:context_is_admin " - "or not 'public':%(visibility)s", - "Policy to delete public artifacts"), - policy.RuleDefault("artifact:delete_deactivated", - "'deactivated':%(status)s and rule:context_is_admin " - "or not 'deactivated':%(status)s", - "Policy to delete deactivated artifacts"), - policy.RuleDefault("artifact:delete", "rule:admin_or_owner and " - "rule:artifact:delete_public and " - "rule:artifact:delete_deactivated", - "Policy to delete artifacts"), - policy.RuleDefault("artifact:set_location", "rule:admin_or_owner", - "Policy to set custom location for artifact"), - policy.RuleDefault("artifact:upload", "rule:admin_or_owner", - "Policy to upload blob for artifact"), - policy.RuleDefault("artifact:download_deactivated", - "'deactivated':%(status)s and rule:context_is_admin " - "or not 'deactivated':%(status)s", - "Policy to download blob from deactivated artifact"), - policy.RuleDefault("artifact:download", - "rule:admin_or_owner and " - "rule:artifact:download_deactivated", - "Policy to download blob from artifact"), -] - - -def list_rules(): - return artifact_policy_rules - - -def init(use_conf=True): - """Init an Enforcer class. - """ - - global _ENFORCER - if not _ENFORCER: - _ENFORCER = policy.Enforcer(CONF, use_conf=use_conf) - _ENFORCER.register_defaults(list_rules()) - return _ENFORCER - - -def reset(): - global _ENFORCER - if _ENFORCER: - _ENFORCER.clear() - _ENFORCER = None - - -def authorize(policy_name, target, context, do_raise=True): - """Method checks that user action can be executed according to policies. 
- - :param policy_name: policy name - :param target: - :param do_raise - :param context: - :return: True if check passed - """ - creds = context.to_policy_values() - result = init().authorize( - policy_name, target, creds, do_raise=do_raise, - exc=exception.PolicyException, policy_name=policy_name) - LOG.debug("Policy %(policy)s check %(result)s for request %(request_id)s", - {'policy': policy_name, - 'result': 'passed' if result else 'failed', - 'request_id': context.request_id}) - return result - - -def check_is_admin(context): - """Whether or not roles contains 'admin' role according to policy setting. - """ - return authorize('context_is_admin', {}, context, do_raise=False) diff --git a/glare/common/semver_db.py b/glare/common/semver_db.py deleted file mode 100644 index d89dafa..0000000 --- a/glare/common/semver_db.py +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import operator - -import semantic_version -from sqlalchemy.orm.properties import CompositeProperty -from sqlalchemy import sql - -from glare.common import exception -from glare.i18n import _ - -MAX_COMPONENT_LENGTH = pow(2, 16) - 1 -MAX_NUMERIC_PRERELEASE_LENGTH = 6 - - -class DBVersion(object): - def __init__(self, components_long, prerelease, build): - """Creates a DBVersion object out of 3 component fields. This initializer - is supposed to be called from SQLAlchemy if 3 database columns are - mapped to this composite field. 
- - :param components_long: a 64-bit long value, containing numeric - components of the version - :param prerelease: a prerelease label of the version, optionally - preformatted with leading zeroes in numeric-only parts of the label - :param build: a build label of the version - """ - version_string = '%s.%s.%s' % _long_to_components(components_long) - if prerelease: - version_string += '-' + _strip_leading_zeroes_from_prerelease( - prerelease) - - if build: - version_string += '+' + build - self.version = semantic_version.Version(version_string) - - def __repr__(self): - return str(self.version) - - def __eq__(self, other): - return (isinstance(other, DBVersion) and - other.version == self.version) - - def __ne__(self, other): - return (not isinstance(other, DBVersion) - or self.version != other.version) - - def __composite_values__(self): - long_version = _version_to_long(self.version) - prerelease = _add_leading_zeroes_to_prerelease(self.version.prerelease) - build = '.'.join(self.version.build) if self.version.build else None - return long_version, prerelease, build - - -def parse(version_string): - version = semantic_version.Version.coerce(version_string) - return DBVersion(_version_to_long(version), - '.'.join(version.prerelease), - '.'.join(version.build)) - - -def _check_limit(value): - if value > MAX_COMPONENT_LENGTH: - message = _("Version component is too " - "large (%d max)") % MAX_COMPONENT_LENGTH - raise exception.InvalidVersion(message) - - -def _version_to_long(version): - """Converts the numeric part of the semver version into the 64-bit long value - using the following logic: - - * major version is stored in first 16 bits of the value - * minor version is stored in next 16 bits - * patch version is stored in following 16 bits - * next 2 bits are used to store the flag: if the version has pre-release - label then these bits are 00, otherwise they are 11. Intermediate values - of the flag (01 and 10) are reserved for future usage. - * last 14 bits of the value are reserved for future usage - - The numeric components of version are checked so their value does not - exceed 16 bits. 
- - :param version: a semantic_version.Version object - """ - _check_limit(version.major) - _check_limit(version.minor) - _check_limit(version.patch) - major = version.major << 48 - minor = version.minor << 32 - patch = version.patch << 16 - flag = 0 if version.prerelease else 2 - flag <<= 14 - return major | minor | patch | flag - - -def _long_to_components(value): - major = value >> 48 - minor = (value - (major << 48)) >> 32 - patch = (value - (major << 48) - (minor << 32)) >> 16 - return str(major), str(minor), str(patch) - - -def _add_leading_zeroes_to_prerelease(label_tuple): - if label_tuple is None: - return None - res = [] - for component in label_tuple: - if component.isdigit(): - if len(component) > MAX_NUMERIC_PRERELEASE_LENGTH: - message = _("Prerelease numeric component is too large " - "(%d characters " - "max)") % MAX_NUMERIC_PRERELEASE_LENGTH - raise exception.InvalidVersion(message) - res.append(component.rjust(MAX_NUMERIC_PRERELEASE_LENGTH, '0')) - else: - res.append(component) - return '.'.join(res) - - -def _strip_leading_zeroes_from_prerelease(string_value): - res = [] - for component in string_value.split('.'): - if component.isdigit(): - val = component.lstrip('0') - if len(val) == 0: # Corner case: when the component is just '0' - val = '0' # it will be stripped completely, so restore it - res.append(val) - else: - res.append(component) - return '.'.join(res) - -strict_op_map = { - operator.ge: operator.gt, - operator.le: operator.lt -} - - -class VersionComparator(CompositeProperty.Comparator): - def _get_comparison(self, values, op): - columns = self.__clause_element__().clauses - if op in strict_op_map: - stricter_op = strict_op_map[op] - else: - stricter_op = op - - return sql.or_(stricter_op(columns[0], values[0]), - sql.and_(columns[0] == values[0], - op(columns[1], values[1]))) - - def __gt__(self, other): - return self._get_comparison(other.__composite_values__(), operator.gt) - - def __ge__(self, other): - return self._get_comparison(other.__composite_values__(), operator.ge) - - def __lt__(self, other): - return self._get_comparison(other.__composite_values__(), operator.lt) - - def __le__(self, other): - return self._get_comparison(other.__composite_values__(), operator.le) diff --git a/glare/common/store_api.py b/glare/common/store_api.py deleted file mode 100644 index 9e9223d..0000000 --- a/glare/common/store_api.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
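# A worked example of the version packing implemented in semver_db above:
# release 1.2.3 has no prerelease label, so its flag bits are 11 (decimal 2)
# and the packed 64-bit value is
#
#     (1 << 48) | (2 << 32) | (3 << 16) | (2 << 14) == 281483566874624
#
# A prerelease such as 1.2.3-alpha packs with flag bits 00 instead, making
# it compare strictly less than the 1.2.3 release as a plain integer, which
# matches semver ordering.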
- -from glance_store import backend -from glance_store import exceptions as store_exc -from oslo_config import cfg -from oslo_log import log as logging - -from glare.common import exception -from glare.common import utils -from glare.store import database - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) -database_api = database.DatabaseStoreAPI() - -error_map = [{'catch': store_exc.NotFound, - 'raise': exception.NotFound}, - {'catch': store_exc.UnknownScheme, - 'raise': exception.BadRequest}, - {'catch': store_exc.BadStoreUri, - 'raise': exception.BadRequest}, - {'catch': store_exc.Duplicate, - 'raise': exception.Conflict}, - {'catch': store_exc.StorageFull, - 'raise': exception.Forbidden}, - {'catch': store_exc.StorageWriteDenied, - 'raise': exception.Forbidden}, - {'catch': store_exc.Forbidden, - 'raise': exception.Forbidden}, - {'catch': store_exc.Invalid, - 'raise': exception.BadRequest}, - {'catch': store_exc.BadStoreConfiguration, - 'raise': exception.GlareException}, - {'catch': store_exc.RemoteServiceUnavailable, - 'raise': exception.BadRequest}, - {'catch': store_exc.HasSnapshot, - 'raise': exception.Conflict}, - {'catch': store_exc.InUseByStore, - 'raise': exception.Conflict}, - {'catch': store_exc.BackendException, - 'raise': exception.GlareException}, - {'catch': store_exc.GlanceStoreException, - 'raise': exception.GlareException}] - - -@utils.error_handler(error_map) -def save_blob_to_store(blob_id, blob, context, max_size, - store_type=None, verifier=None): - """Save file to specified store type and return location info to the user. - - :param store_type: type of the store, None means save to default store. - :param blob_id: id of blob - :param blob: blob file iterator - :param context: user context - :param verifier:signature verified - :return: tuple of values: (location_uri, size, checksums) - """ - if store_type not in set(CONF.glance_store.stores + ['database']): - LOG.warning("Incorrect backend configuration - scheme '%s' is not" - " supported. Fallback to default store.", store_type) - store_type = None - data = utils.LimitingReader(utils.CooperativeReader(blob), max_size) - - LOG.debug('Start uploading blob %s.', blob_id) - if store_type == 'database': - location = database_api.add_to_backend( - blob_id, data, context, verifier) - else: - (location, size, md5checksum, __) = backend.add_to_backend( - CONF, blob_id, data, 0, store_type, context, verifier) - LOG.debug('Uploading of blob %s is finished.', blob_id) - - checksums = {"md5": data.md5.hexdigest(), - "sha1": data.sha1.hexdigest(), - "sha256": data.sha256.hexdigest()} - return location, data.bytes_read, checksums - - -@utils.error_handler(error_map) -def load_from_store(uri, context): - """Load file from store backend. - - :param uri: blob uri - :param context: user context - :return: file iterator - """ - if uri.startswith("sql://"): - return utils.BlobIterator( - database_api.get_from_store(uri, context)) - return backend.get_from_backend(uri=uri, context=context)[0] - - -@utils.error_handler(error_map) -def delete_blob(uri, context): - """Delete blob from backend store. 
- - :param uri: blob uri - :param context: user context - """ - if uri.startswith("sql://"): - return database_api.delete_from_store(uri, context) - return backend.delete_from_backend(uri, context) diff --git a/glare/common/utils.py b/glare/common/utils.py deleted file mode 100644 index 2d04308..0000000 --- a/glare/common/utils.py +++ /dev/null @@ -1,582 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2014 SoftLayer Technologies, Inc. -# Copyright 2015 Mirantis, Inc -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -System-level utilities and helper functions. -""" -try: - from eventlet import sleep -except ImportError: - from time import sleep -from eventlet.green import socket - -import hashlib -import os -import re - -from OpenSSL import crypto -from oslo_config import cfg -from oslo_log import log as logging -from oslo_utils import encodeutils -from oslo_utils import excutils -from oslo_utils import timeutils -from oslo_utils import uuidutils -from oslo_versionedobjects import fields -import six - -from glare.common import exception -from glare.i18n import _ -from glare.objects.meta import fields as glare_fields - -CONF = cfg.CONF - -LOG = logging.getLogger(__name__) - -GLARE_TEST_SOCKET_FD_STR = 'GLARE_TEST_SOCKET_FD' - - -def cooperative_iter(iter): - """Return an iterator which schedules after each - iteration. This can prevent eventlet thread starvation. - - :param iter: an iterator to wrap - """ - try: - for chunk in iter: - sleep(0) - yield chunk - except Exception as err: - with excutils.save_and_reraise_exception(): - LOG.error("Error: cooperative_iter exception %s", err) - - -def cooperative_read(fd): - """Wrap a file descriptor's read with a partial function which schedules - after each read. This can prevent eventlet thread starvation. - - :param fd: a file descriptor to wrap - """ - def readfn(*args): - result = fd.read(*args) - sleep(0) - return result - return readfn - - -MAX_COOP_READER_BUFFER_SIZE = 134217728 # 128M seems like a sane buffer limit - - -class CooperativeReader(object): - """An eventlet thread friendly class for reading in blob data. - - When accessing data either through the iterator or the read method - we perform a sleep to allow a co-operative yield. When there is more than - one blob being uploaded/downloaded this prevents eventlet thread - starvation, ie allows all threads to be scheduled periodically rather than - having the same thread be continuously active. 
- """ - def __init__(self, fd): - """:param fd: Underlying blob file object - """ - self.fd = fd - self.iterator = None - # NOTE(markwash): if the underlying supports read(), overwrite the - # default iterator-based implementation with cooperative_read which - # is more straightforward - if hasattr(fd, 'read'): - self.read = cooperative_read(fd) - else: - self.iterator = None - self.buffer = b'' - self.position = 0 - - def read(self, length=None): - """Return the requested amount of bytes, fetching the next chunk of - the underlying iterator when needed. - - This is replaced with cooperative_read in __init__ if the underlying - fd already supports read(). - """ - if length is None: - if len(self.buffer) - self.position > 0: - # if no length specified but some data exists in buffer, - # return that data and clear the buffer - result = self.buffer[self.position:] - self.buffer = b'' - self.position = 0 - return str(result) - else: - # otherwise read the next chunk from the underlying iterator - # and return it as a whole. Reset the buffer, as subsequent - # calls may specify the length - try: - if self.iterator is None: - self.iterator = self.__iter__() - return next(self.iterator) - except StopIteration: - return '' - finally: - self.buffer = b'' - self.position = 0 - else: - result = bytearray() - while len(result) < length: - if self.position < len(self.buffer): - to_read = length - len(result) - chunk = self.buffer[self.position:self.position + to_read] - result.extend(chunk) - - # This check is here to prevent potential OOM issues if - # this code is called with unreasonably high values of read - # size. Currently it is only called from the HTTP clients - # of Glare backend stores, which use httplib for data - # streaming, which has readsize hardcoded to 8K, so this - # check should never fire. Regardless it still worths to - # make the check, as the code may be reused somewhere else. - if len(result) >= MAX_COOP_READER_BUFFER_SIZE: - raise exception.RequestEntityTooLarge() - self.position += len(chunk) - else: - try: - if self.iterator is None: - self.iterator = self.__iter__() - self.buffer = next(self.iterator) - self.position = 0 - except StopIteration: - self.buffer = b'' - self.position = 0 - return bytes(result) - return bytes(result) - - def __iter__(self): - return cooperative_iter(self.fd.__iter__()) - - -class LimitingReader(object): - """Reader designed to fail when reading blob data past the configured - allowable amount. 
- """ - def __init__(self, data, limit): - """ - :param data: Underlying blob data object - :param limit: maximum number of bytes the reader should allow - """ - self.data = data - self.limit = limit - self.bytes_read = 0 - self.md5 = hashlib.md5() - self.sha1 = hashlib.sha1() - self.sha256 = hashlib.sha256() - - def __iter__(self): - for chunk in self.data: - self.bytes_read += len(chunk) - if self.bytes_read > self.limit: - raise exception.RequestEntityTooLarge() - else: - yield chunk - - def read(self, length=None): - res = self.data.read() if length is None else self.data.read(length) - len_result = len(res) - self.bytes_read += len_result - if len_result: - self.md5.update(res) - self.sha1.update(res) - self.sha256.update(res) - if self.bytes_read > self.limit: - message = _("The server is refusing to process a request because" - " the request entity is larger than the server is" - " willing or able to process - %s bytes.") % self.limit - raise exception.RequestEntityTooLarge(message=message) - return res - - -def validate_key_cert(key_file, cert_file): - try: - error_key_name = "private key" - error_filename = key_file - with open(key_file, 'r') as keyfile: - key_str = keyfile.read() - key = crypto.load_privatekey(crypto.FILETYPE_PEM, key_str) - - error_key_name = "certificate" - error_filename = cert_file - with open(cert_file, 'r') as certfile: - cert_str = certfile.read() - cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_str) - except IOError as ioe: - raise RuntimeError(_("There is a problem with your %(error_key_name)s " - "%(error_filename)s. Please verify it." - " Error: %(ioe)s") % - {'error_key_name': error_key_name, - 'error_filename': error_filename, - 'ioe': ioe}) - except crypto.Error as ce: - raise RuntimeError(_("There is a problem with your %(error_key_name)s " - "%(error_filename)s. Please verify it. OpenSSL" - " error: %(ce)s") % - {'error_key_name': error_key_name, - 'error_filename': error_filename, - 'ce': ce}) - - try: - data = uuidutils.generate_uuid() - # On Python 3, explicitly encode to UTF-8 to call crypto.sign() which - # requires bytes. Otherwise, it raises a deprecation warning (and - # will raise an error later). - data = encodeutils.to_utf8(data) - digest = CONF.digest_algorithm - if digest == 'sha1': - LOG.warning( - 'The FIPS (FEDERAL INFORMATION PROCESSING STANDARDS)' - ' state that the SHA-1 is not suitable for' - ' general-purpose digital signature applications (as' - ' specified in FIPS 186-3) that require 112 bits of' - ' security. The default value is sha1 in Kilo for a' - ' smooth upgrade process, and it will be updated' - ' with sha256 in next release(L).') - out = crypto.sign(key, data, digest) - crypto.verify(cert, out, data, digest) - except crypto.Error as ce: - raise RuntimeError(_("There is a problem with your key pair. " - "Please verify that cert %(cert_file)s and " - "key %(key_file)s belong together. 
OpenSSL " - "error %(ce)s") % {'cert_file': cert_file, - 'key_file': key_file, - 'ce': ce}) - - -def get_test_suite_socket(): - global GLARE_TEST_SOCKET_FD_STR - if GLARE_TEST_SOCKET_FD_STR in os.environ: - fd = int(os.environ[GLARE_TEST_SOCKET_FD_STR]) - sock = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM) - if six.PY2: - sock = socket.SocketType(_sock=sock) - sock.listen(CONF.backlog) - del os.environ[GLARE_TEST_SOCKET_FD_STR] - os.close(fd) - return sock - return None - - -try: - REGEX_4BYTE_UNICODE = re.compile(u'[\U00010000-\U0010ffff]') -except re.error: - # UCS-2 build case - REGEX_4BYTE_UNICODE = re.compile(u'[\uD800-\uDBFF][\uDC00-\uDFFF]') - - -def no_4byte_params(f): - """Checks that no 4 byte unicode characters are allowed - in dicts' keys/values and string's parameters. - """ - def wrapper(*args, **kwargs): - - def _is_match(some_str): - return (isinstance(some_str, six.text_type) and - REGEX_4BYTE_UNICODE.findall(some_str) != []) - - def _check_dict(data_dict): - # a dict of dicts has to be checked recursively - for key, value in data_dict.items(): - if isinstance(value, dict): - _check_dict(value) - else: - if _is_match(key): - msg = _("Property names can't contain 4 byte unicode.") - raise exception.BadRequest(msg) - if _is_match(value): - msg = (_("%s can't contain 4 byte unicode characters.") - % key.title()) - raise exception.BadRequest(msg) - - for data_dict in [arg for arg in args if isinstance(arg, dict)]: - _check_dict(data_dict) - # now check args for str values - for arg in args: - if _is_match(arg): - msg = _("Param values can't contain 4 byte unicode.") - raise exception.BadRequest(msg) - # check kwargs as well, as params are passed as kwargs via - # registry calls - _check_dict(kwargs) - return f(*args, **kwargs) - return wrapper - - -def stash_conf_values(): - """Make a copy of some of the current global CONF's settings. - Allows determining if any of these values have changed - when the config is reloaded. - """ - conf = { - 'bind_host': CONF.bind_host, - 'bind_port': CONF.bind_port, - 'tcp_keepidle': CONF.cert_file, - 'backlog': CONF.backlog, - 'key_file': CONF.key_file, - 'cert_file': CONF.cert_file, - 'enabled_artifact_types': CONF.enabled_artifact_types, - 'custom_artifact_types_modules': CONF.custom_artifact_types_modules - } - - return conf - - -def split_filter_op(expression): - """Split operator from threshold in an expression. - Designed for use on a comparative-filtering query field. - When no operator is found, default to an equality comparison. - - :param expression: the expression to parse - :return: a tuple (operator, threshold) parsed from expression - """ - left, sep, right = expression.partition(':') - if sep: - # If the expression is a date of the format ISO 8601 like - # CCYY-MM-DDThh:mm:ss+hh:mm and has no operator, it should - # not be partitioned, and a default operator of eq should be - # assumed. - try: - timeutils.parse_isotime(expression) - op = 'eq' - threshold = expression - except ValueError: - op = left - threshold = right - else: - op = 'eq' # default operator - threshold = left - - # NOTE stevelle decoding escaped values may be needed later - return op, threshold - - -def validate_quotes(value): - """Validate filter values - - Validation opening/closing quotes in the expression. - """ - open_quotes = True - for i in range(len(value)): - if value[i] == '"': - if i and value[i - 1] == '\\': - continue - if open_quotes: - if i and value[i - 1] != ',': - msg = _("Invalid filter value %s. 
There is no comma " - "before opening quotation mark.") % value - raise exception.InvalidParameterValue(message=msg) - else: - if i + 1 != len(value) and value[i + 1] != ",": - msg = _("Invalid filter value %s. There is no comma " - "after closing quotation mark.") % value - raise exception.InvalidParameterValue(message=msg) - open_quotes = not open_quotes - if not open_quotes: - msg = _("Invalid filter value %s. The quote is not closed.") % value - raise exception.InvalidParameterValue(message=msg) - - -def split_filter_value_for_quotes(value): - """Split filter values - - Split values by commas and quotes for 'in' operator, according api-wg. - """ - validate_quotes(value) - tmp = re.compile(r''' - "( # if found a double-quote - [^\"\\]* # take characters either non-quotes or backslashes - (?:\\. # take backslashes and character after it - [^\"\\]*)* # take characters either non-quotes or backslashes - ) # before double-quote - ",? # a double-quote with comma maybe - | ([^,]+),? # if not found double-quote take any non-comma - # characters with comma maybe - | , # if we have only comma take empty string - ''', re.VERBOSE) - return [val[0] or val[1] for val in re.findall(tmp, value)] - - -class error_handler(object): - def __init__(self, error_map, default_exception=None): - """Init method of the class. - - :param error_map: dict of exception that can be raised - in func and exceptions that must be raised for these exceptions. - For example, if sqlalchemy NotFound might be raised and we need - re-raise it as glare NotFound exception then error_map must - contain {"catch": SQLAlchemyNotFound, - "raise": exceptions.NotFound} - :param default_exception: default exception that must be raised if - exception that cannot be found in error map was raised - :return: func - """ - self.error_map = error_map - self.default_exception = default_exception - - def __call__(self, f): - """Decorator that catches exception that came from func or method. 
- - :param f: target func - """ - - def new_function(*args, **kwargs): - try: - return f(*args, **kwargs) - except Exception as e: - for map_record in self.error_map: - if isinstance(e, map_record['catch']): - raise map_record['raise'](str(e)) - else: - if self.default_exception: - raise self.default_exception(str(e)) - else: - raise - return new_function - - -def get_schema_type(attr): - if isinstance(attr, fields.IntegerField) or attr is fields.Integer: - return 'integer' - elif isinstance(attr, fields.FloatField) or attr is fields.Float: - return 'number' - elif isinstance(attr, fields.FlexibleBooleanField) \ - or attr is fields.FlexibleBoolean: - return 'boolean' - elif isinstance(attr, glare_fields.List): - return 'array' - elif isinstance(attr, (glare_fields.Dict, glare_fields.BlobField)): - return 'object' - return 'string' - - -def get_glare_type(attr): - if isinstance(attr, fields.IntegerField): - return 'Integer' - elif isinstance(attr, fields.FloatField): - return 'Float' - elif isinstance(attr, fields.FlexibleBooleanField): - return 'Boolean' - elif isinstance(attr, fields.DateTimeField): - return 'DateTime' - elif isinstance(attr, glare_fields.BlobField): - return 'Blob' - elif isinstance(attr, glare_fields.Link): - return 'Link' - elif isinstance(attr, glare_fields.List): - return _get_element_type(attr.element_type) + 'List' - elif isinstance(attr, glare_fields.Dict): - return _get_element_type(attr.element_type) + 'Dict' - return 'String' - - -def _get_element_type(element_type): - if element_type is fields.FlexibleBooleanField: - return 'Boolean' - elif element_type is fields.Integer: - return 'Integer' - elif element_type is fields.Float: - return 'Float' - elif element_type is glare_fields.BlobFieldType: - return 'Blob' - elif element_type is glare_fields.LinkFieldType: - return 'Link' - return 'String' - - -class BlobIterator(object): - """Reads data from a blob, one chunk at a time. - """ - - def __init__(self, data, chunk_size=65536): - self.chunk_size = chunk_size - self.data = data - - def __iter__(self): - bytes_left = len(self.data) - i = 0 - while bytes_left > 0: - data = self.data[i * self.chunk_size:(i + 1) * self.chunk_size] - bytes_left -= len(data) - yield data - raise StopIteration() - - -def validate_status_transition(af, from_status, to_status): - if from_status == 'deleted': - msg = _("Cannot change status if artifact is deleted.") - raise exception.Forbidden(msg) - if to_status == 'active': - if from_status == 'drafted': - for name, type_obj in af.fields.items(): - if type_obj.required_on_activate and getattr(af, name) is None: - msg = _("'%s' field value must be set before " - "activation.") % name - raise exception.Forbidden(msg) - elif to_status == 'drafted': - if from_status != 'drafted': - msg = _("Cannot change status to 'drafted'") % from_status - raise exception.Forbidden(msg) - elif to_status == 'deactivated': - if from_status not in ('active', 'deactivated'): - msg = _("Cannot deactivate artifact if it's not active.") - raise exception.Forbidden(msg) - elif to_status == 'deleted': - msg = _("Cannot delete artifact with PATCH requests. 
Use special " - "API to do this.") - raise exception.Forbidden(msg) - else: - msg = _("Unknown artifact status: %s.") % to_status - raise exception.BadRequest(msg) - - -def validate_visibility_transition(af, from_visibility, to_visibility): - if to_visibility == 'private': - if from_visibility != 'private': - msg = _("Cannot make artifact private again.") - raise exception.Forbidden() - elif to_visibility == 'public': - if af.status != 'active': - msg = _("Cannot change visibility to 'public' if artifact" - " is not active.") - raise exception.Forbidden(msg) - else: - msg = _("Unknown artifact visibility: %s.") % to_visibility - raise exception.BadRequest(msg) - - -def validate_change_allowed(af, field_name): - """Validate if fields can be set for the artifact.""" - if field_name not in af.fields: - msg = _("Cannot add new field '%s' to artifact.") % field_name - raise exception.BadRequest(msg) - if af.status not in ('active', 'drafted'): - msg = _("Forbidden to change fields " - "if artifact is not active or drafted.") - raise exception.Forbidden(message=msg) - if af.fields[field_name].system is True: - msg = _("Forbidden to specify system field %s. It is not " - "available for modifying by users.") % field_name - raise exception.Forbidden(msg) - if af.status == 'active' and not af.fields[field_name].mutable: - msg = (_("Forbidden to change field '%s' after activation.") - % field_name) - raise exception.Forbidden(message=msg) diff --git a/glare/common/wsgi.py b/glare/common/wsgi.py deleted file mode 100644 index e7e00d9..0000000 --- a/glare/common/wsgi.py +++ /dev/null @@ -1,834 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2010 OpenStack Foundation -# Copyright 2014 IBM Corp. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Utility methods for working with WSGI servers -""" -from __future__ import print_function - -import errno -import functools -import os -import signal -import sys -import time - -import eventlet -from eventlet.green import socket -from eventlet.green import ssl -import eventlet.greenio -import eventlet.wsgi -import glance_store -from oslo_concurrency import processutils -from oslo_config import cfg -from oslo_log import log as logging -from oslo_serialization import jsonutils -from oslo_utils import encodeutils -from oslo_utils import strutils -from osprofiler import opts as profiler_opts -import routes -import routes.middleware -import six -import webob.dec -import webob.exc -from webob import multidict - -from glare.common import exception as glare_exc -from glare.common import utils -from glare import i18n -from glare.i18n import _ - - -bind_opts = [ - cfg.HostAddressOpt('bind_host', default='0.0.0.0', - help=_('Address to bind the server. 
Useful when ' - 'selecting a particular network interface.')), - cfg.PortOpt('bind_port', - help=_('The port on which the server will listen.')), -] - -socket_opts = [ - cfg.IntOpt('backlog', default=4096, - help=_('The backlog value that will be used when creating the ' - 'TCP listener socket.')), - cfg.IntOpt('tcp_keepidle', default=600, - help=_('The value for the socket option TCP_KEEPIDLE. This is ' - 'the time in seconds that the connection must be idle ' - 'before TCP starts sending keepalive probes.')), - cfg.StrOpt('ca_file', help=_('CA certificate file to use to verify ' - 'connecting clients.')), - cfg.StrOpt('cert_file', help=_('Certificate file to use when starting API ' - 'server securely.')), - cfg.StrOpt('key_file', help=_('Private key file to use when starting API ' - 'server securely.')), -] - -eventlet_opts = [ - cfg.IntOpt('workers', default=0, min=0, - help=_('The number of child process workers that will be ' - 'created to service requests. The default will be ' - 'equal to the number of CPUs available.')), - cfg.IntOpt('max_header_line', default=16384, min=0, - help=_('Maximum line size of message headers to be accepted. ' - 'max_header_line may need to be increased when using ' - 'large tokens (typically those generated by the ' - 'Keystone v3 API with big service catalogs')), - cfg.BoolOpt('http_keepalive', default=True, - help=_('If False, server will return the header ' - '"Connection: close", ' - 'If True, server will return "Connection: Keep-Alive" ' - 'in its responses. In order to close the client socket ' - 'connection explicitly after the response is sent and ' - 'read successfully by the client, you simply have to ' - 'set this option to False when you create a wsgi ' - 'server.')), - cfg.IntOpt('client_socket_timeout', default=900, min=0, - help=_('Timeout for client connections\' socket operations. ' - 'If an incoming connection is idle for this number of ' - 'seconds it will be closed. A value of \'0\' means ' - 'wait forever.')), -] - -LOG = logging.getLogger(__name__) - -CONF = cfg.CONF -CONF.register_opts(bind_opts) -CONF.register_opts(socket_opts) -CONF.register_opts(eventlet_opts) -profiler_opts.set_defaults(CONF) - -ASYNC_EVENTLET_THREAD_POOL_LIST = [] - - -def get_num_workers(): - """Return the configured number of workers.""" - if CONF.workers == 0: - # 0 implies the number of CPUs - return processutils.get_worker_count() - return CONF.workers - - -def get_bind_addr(default_port=None): - """Return the host and port to bind to.""" - return (CONF.bind_host, CONF.bind_port or default_port) - - -def ssl_wrap_socket(sock): - """Wrap an existing socket in SSL - - :param sock: non-SSL socket to wrap - - :returns: An SSL wrapped socket - """ - utils.validate_key_cert(CONF.key_file, CONF.cert_file) - - ssl_kwargs = { - 'server_side': True, - 'certfile': CONF.cert_file, - 'keyfile': CONF.key_file, - 'cert_reqs': ssl.CERT_NONE, - } - - if CONF.ca_file: - ssl_kwargs['ca_certs'] = CONF.ca_file - ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED - - return ssl.wrap_socket(sock, **ssl_kwargs) - - -def get_socket(default_port): - """Bind socket to bind ip:port in conf - - :param default_port: port to bind to if none is specified in conf - - :returns: a socket object as returned from socket.listen or - ssl.wrap_socket if conf specifies cert_file - """ - bind_addr = get_bind_addr(default_port) - - # TODO(jaypipes): eventlet's greened socket module does not actually - # support IPv6 in getaddrinfo(). 
We need to get around this in the - # future or monitor upstream for a fix - address_family = [ - addr[0] for addr in socket.getaddrinfo(bind_addr[0], - bind_addr[1], - socket.AF_UNSPEC, - socket.SOCK_STREAM) - if addr[0] in (socket.AF_INET, socket.AF_INET6) - ][0] - - use_ssl = CONF.key_file or CONF.cert_file - if use_ssl and (not CONF.key_file or not CONF.cert_file): - raise RuntimeError(_("When running server in SSL mode, you must " - "specify both a cert_file and key_file " - "option value in your configuration file")) - - sock = utils.get_test_suite_socket() - retry_until = time.time() + 30 - - while not sock and time.time() < retry_until: - try: - sock = eventlet.listen(bind_addr, - backlog=CONF.backlog, - family=address_family) - except socket.error as err: - if err.args[0] != errno.EADDRINUSE: - raise - eventlet.sleep(0.1) - if not sock: - raise RuntimeError(_("Could not bind to %(host)s:%(port)s after" - " trying for 30 seconds") % - {'host': bind_addr[0], - 'port': bind_addr[1]}) - - return sock - - -def set_eventlet_hub(): - try: - eventlet.hubs.use_hub('poll') - except Exception: - try: - eventlet.hubs.use_hub('selects') - except Exception: - msg = _("eventlet 'poll' nor 'selects' hubs are available " - "on this platform") - raise glare_exc.WorkerCreationFailure( - reason=msg) - - -def initialize_glance_store(): - """Initialize glance store.""" - glance_store.register_opts(CONF) - glance_store.create_stores(CONF) - glance_store.verify_default_store() - - -def get_asynchronous_eventlet_pool(size=1000): - """Return eventlet pool to caller. - - Also store pools created in global list, to wait on - it after getting signal for graceful shutdown. - - :param size: eventlet pool size - :returns: eventlet pool - """ - global ASYNC_EVENTLET_THREAD_POOL_LIST - - pool = eventlet.GreenPool(size=size) - # Add pool to global ASYNC_EVENTLET_THREAD_POOL_LIST - ASYNC_EVENTLET_THREAD_POOL_LIST.append(pool) - - return pool - - -class Server(object): - """Server class to manage multiple WSGI sockets and applications. - - This class requires initialize_glance_store set to True if - glance store needs to be initialized. - """ - def __init__(self, threads=1000, initialize_glance_store=False): - os.umask(0o27) # ensure files are created with the correct privileges - self._logger = logging.getLogger("eventlet.wsgi.server") - self.threads = threads - self.children = set() - self.stale_children = set() - self.running = True - self.initialize_glance_store = initialize_glance_store - self.pgid = os.getpid() - try: - os.setpgid(self.pgid, self.pgid) - except OSError: - self.pgid = 0 - - def hup(self, *args): - """Reloads configuration files with zero down time - """ - signal.signal(signal.SIGHUP, signal.SIG_IGN) - raise glare_exc.SIGHUPInterrupt - - def kill_children(self, *args): - """Kills the entire process group.""" - signal.signal(signal.SIGTERM, signal.SIG_IGN) - signal.signal(signal.SIGINT, signal.SIG_IGN) - self.running = False - os.killpg(self.pgid, signal.SIGTERM) - - def start(self, application, default_port): - """Run a WSGI server with the given application. - - :param application: The application to be run in the WSGI server - :param default_port: Port to bind to if none is specified in conf - """ - self.application = application - self.default_port = default_port - self.configure() - self.start_wsgi() - - def start_wsgi(self): - workers = get_num_workers() - if workers is None: - # Useful for profiling, test, debug etc. 
- self.pool = self.create_pool() - self.pool.spawn_n(self._single_run, self.application, self.sock) - return - else: - LOG.info("Starting %d workers", workers) - signal.signal(signal.SIGTERM, self.kill_children) - signal.signal(signal.SIGINT, self.kill_children) - signal.signal(signal.SIGHUP, self.hup) - while len(self.children) < workers: - self.run_child() - - def create_pool(self): - return get_asynchronous_eventlet_pool(size=self.threads) - - def _remove_children(self, pid): - if pid in self.children: - self.children.remove(pid) - LOG.info('Removed dead child %s', pid) - elif pid in self.stale_children: - self.stale_children.remove(pid) - LOG.info('Removed stale child %s', pid) - else: - LOG.warning('Unrecognised child %s', pid) - - def _verify_and_respawn_children(self, pid, status): - if len(self.stale_children) == 0: - LOG.debug('No stale children') - if os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0: - LOG.error('Not respawning child %d, cannot ' - 'recover from termination', pid) - if not self.children and not self.stale_children: - LOG.info('All workers have terminated. Exiting') - self.running = False - else: - if len(self.children) < get_num_workers(): - self.run_child() - - def wait_on_children(self): - while self.running: - try: - pid, status = os.wait() - if os.WIFEXITED(status) or os.WIFSIGNALED(status): - self._remove_children(pid) - self._verify_and_respawn_children(pid, status) - except OSError as err: - if err.errno not in (errno.EINTR, errno.ECHILD): - raise - except KeyboardInterrupt: - LOG.info('Caught keyboard interrupt. Exiting.') - break - except glare_exc.SIGHUPInterrupt: - self.reload() - continue - eventlet.greenio.shutdown_safe(self.sock) - self.sock.close() - LOG.debug('Exited') - - def configure(self, old_conf=None, has_changed=None): - """Apply configuration settings - - :param old_conf: Cached old configuration settings (if any) - :param has_changed: callable to determine if a parameter has changed - """ - eventlet.wsgi.MAX_HEADER_LINE = CONF.max_header_line - self.client_socket_timeout = CONF.client_socket_timeout or None - - # determine if we need to reload artifact type definitions - if old_conf is not None and ( - has_changed('enabled_artifact_types') or - has_changed('custom_artifact_types_modules')): - from glare import engine - engine.Engine.registry.reset_registry() - engine.Engine.registry.register_all_artifacts() - - self.configure_socket(old_conf, has_changed) - if self.initialize_glance_store: - initialize_glance_store() - - def reload(self): - """Reload and re-apply configuration settings - - Existing child processes are sent a SIGHUP signal - and will exit after completing existing requests. - New child processes, which will have the updated - configuration, are spawned. This allows preventing - interruption to the service. 
- """ - def _has_changed(old, new, param): - old = old.get(param) - new = getattr(new, param) - return new != old - - old_conf = utils.stash_conf_values() - has_changed = functools.partial(_has_changed, old_conf, CONF) - CONF.reload_config_files() - os.killpg(self.pgid, signal.SIGHUP) - self.stale_children = self.children - self.children = set() - - # Ensure any logging config changes are picked up - logging.setup(CONF, 'glare') - - self.configure(old_conf, has_changed) - self.start_wsgi() - - def wait(self): - """Wait until all servers have completed running.""" - try: - if self.children: - self.wait_on_children() - else: - self.pool.waitall() - except KeyboardInterrupt: - pass - - def run_child(self): - def child_hup(*args): - """Shuts down child processes, existing requests are handled.""" - signal.signal(signal.SIGHUP, signal.SIG_IGN) - eventlet.wsgi.is_accepting = False - self.sock.close() - - pid = os.fork() - if pid == 0: - signal.signal(signal.SIGHUP, child_hup) - signal.signal(signal.SIGTERM, signal.SIG_DFL) - # ignore the interrupt signal to avoid a race whereby - # a child worker receives the signal before the parent - # and is respawned unnecessarily as a result - signal.signal(signal.SIGINT, signal.SIG_IGN) - # The child has no need to stash the unwrapped - # socket, and the reference prevents a clean - # exit on sighup - self._sock = None - self.run_server() - LOG.info('Child %d exiting normally', os.getpid()) - # self.pool.waitall() is now called in wsgi's server so - # it's safe to exit here - sys.exit(0) - else: - LOG.info('Started child %s', pid) - self.children.add(pid) - - def run_server(self): - """Run a WSGI server.""" - eventlet.wsgi.HttpProtocol.default_request_version = "HTTP/1.0" - self.pool = self.create_pool() - try: - eventlet.wsgi.server(self.sock, - self.application, - log=self._logger, - custom_pool=self.pool, - debug=False, - keepalive=CONF.http_keepalive, - socket_timeout=self.client_socket_timeout) - except socket.error as err: - if err[0] != errno.EINVAL: - raise - - # waiting on async pools - if ASYNC_EVENTLET_THREAD_POOL_LIST: - for pool in ASYNC_EVENTLET_THREAD_POOL_LIST: - pool.waitall() - - def _single_run(self, application, sock): - """Start a WSGI server in a new green thread.""" - LOG.info("Starting single process server") - eventlet.wsgi.server(sock, application, custom_pool=self.pool, - log=self._logger, - debug=False, - keepalive=CONF.http_keepalive, - socket_timeout=self.client_socket_timeout) - - def configure_socket(self, old_conf=None, has_changed=None): - """Ensure a socket exists and is appropriately configured. - - This function is called on start up, and can also be - called in the event of a configuration reload. - - When called for the first time a new socket is created. - If reloading and either bind_host or bind_port have been - changed the existing socket must be closed and a new - socket opened (laws of physics). - - In all other cases (bind_host/bind_port have not changed) - the existing socket is reused. - - :param old_conf: Cached old configuration settings (if any) - :param has_changed: callable to determine if a parameter has changed - """ - # Do we need a fresh socket? - new_sock = (old_conf is None or ( - has_changed('bind_host') or - has_changed('bind_port'))) - # Will we be using https? - use_ssl = not (not CONF.cert_file or not CONF.key_file) - # Were we using https before? 
-        old_use_ssl = (old_conf is not None and not (
-            not old_conf.get('key_file') or
-            not old_conf.get('cert_file')))
-        # Do we now need to perform an SSL wrap on the socket?
-        wrap_sock = use_ssl is True and (old_use_ssl is False or new_sock)
-        # Do we now need to perform an SSL unwrap on the socket?
-        unwrap_sock = use_ssl is False and old_use_ssl is True
-
-        if new_sock:
-            self._sock = None
-            if old_conf is not None:
-                self.sock.close()
-            _sock = get_socket(self.default_port)
-            _sock.setsockopt(socket.SOL_SOCKET,
-                             socket.SO_REUSEADDR, 1)
-            # sockets can hang around forever without keepalive
-            _sock.setsockopt(socket.SOL_SOCKET,
-                             socket.SO_KEEPALIVE, 1)
-            self._sock = _sock
-
-        if wrap_sock:
-            self.sock = ssl_wrap_socket(self._sock)
-
-        if unwrap_sock or (new_sock and not use_ssl):
-            self.sock = self._sock
-
-        # Pick up newly deployed certs
-        if old_conf is not None and use_ssl is True and old_use_ssl is True:
-            if has_changed('cert_file') or has_changed('key_file'):
-                utils.validate_key_cert(CONF.key_file, CONF.cert_file)
-            if has_changed('cert_file'):
-                self.sock.certfile = CONF.cert_file
-            if has_changed('key_file'):
-                self.sock.keyfile = CONF.key_file
-
-        if new_sock or (old_conf is not None and has_changed('tcp_keepidle')):
-            # This option isn't available in the OS X version of eventlet
-            if hasattr(socket, 'TCP_KEEPIDLE'):
-                self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE,
-                                     CONF.tcp_keepidle)
-
-        if old_conf is not None and has_changed('backlog'):
-            self.sock.listen(CONF.backlog)
-
-
-class APIMapper(routes.Mapper):
-    """Handle route matching when url is '' because routes.Mapper returns
-    an error in this case.
-    """
-
-    def routematch(self, url=None, environ=None):
-        if url == "":
-            result = self._match("", environ)
-            return result[0], result[1]
-        return routes.Mapper.routematch(self, url, environ)
-
-
-class RejectMethodController(object):
-    def reject(self, req, allowed_methods, *args, **kwargs):
-        LOG.debug("The method %s is not allowed for this resource",
-                  req.environ['REQUEST_METHOD'])
-        raise webob.exc.HTTPMethodNotAllowed(
-            headers=[('Allow', allowed_methods)])
-
-
-class Router(object):
-    """WSGI middleware that maps incoming requests to WSGI apps."""
-
-    def __init__(self, mapper):
-        """Create a router for the given routes.Mapper.
-
-        Each route in `mapper` must specify a 'controller', which is a
-        WSGI app to call. You'll probably want to specify an 'action' as
-        well and have your controller be a wsgi.Controller, which will route
-        the request to the action method.
-
-        Examples:
-          mapper = routes.Mapper()
-          sc = ServerController()
-
-          # Explicit mapping of one route to a controller+action
-          mapper.connect(None, "/svrlist", controller=sc, action="list")
-
-          # Actions are all implicitly defined
-          mapper.resource("server", "servers", controller=sc)
-
-          # Pointing to an arbitrary WSGI app. You can specify the
-          # {path_info:.*} parameter so the target app can be handed just
-          # that section of the URL.
-          mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp())
-        """
-        mapper.redirect("", "/")
-        self.map = mapper
-        self._router = routes.middleware.RoutesMiddleware(self._dispatch,
-                                                          self.map)
-
-    @classmethod
-    def factory(cls, global_conf, **local_conf):
-        return cls(APIMapper())
-
-    @webob.dec.wsgify
-    def __call__(self, req):
-        """Route the incoming request to a controller based on self.map.
-        If no match, return either a 404 (Not Found) or 501 (Not Implemented).
- """ - return self._router - - @staticmethod - @webob.dec.wsgify - def _dispatch(req): - """Called by self._router after matching the incoming request to a route - and putting the information into req.environ. Either returns 404, - 501, or the routed WSGI app's response. - """ - match = req.environ['wsgiorg.routing_args'][1] - if not match: - implemented_http_methods = ['GET', 'HEAD', 'POST', 'PUT', - 'DELETE', 'PATCH'] - if req.environ['REQUEST_METHOD'] not in implemented_http_methods: - return webob.exc.HTTPNotImplemented() - else: - return webob.exc.HTTPNotFound() - app = match['controller'] - return app - - -class Request(webob.Request): - """Add some OpenStack API-specific logic to the base webob.Request.""" - - def best_match_content_type(self): - """Determine the requested response content-type.""" - supported = ('application/json',) - bm = self.accept.best_match(supported) - return bm or 'application/json' - - def best_match_language(self): - """Determines best available locale from the Accept-Language header. - - :returns: the best language match or None if the 'Accept-Language' - header was not available in the request. - """ - if not self.accept_language: - return None - langs = i18n.get_available_languages('glare') - return self.accept_language.best_match(langs) - - def get_content_range(self): - """Return the `Range` in a request.""" - range_str = self.headers.get('Content-Range') - if range_str is not None: - range_ = webob.byterange.ContentRange.parse(range_str) - if range_ is None: - msg = _('Malformed Content-Range header: %s') % range_str - raise webob.exc.HTTPBadRequest(explanation=msg) - return range_ - - -class JSONRequestDeserializer(object): - valid_transfer_encoding = frozenset(['chunked', 'compress', 'deflate', - 'gzip', 'identity']) - - httpverb_may_have_body = frozenset({'POST', 'PUT', 'PATCH'}) - - @classmethod - def is_valid_encoding(cls, request): - request_encoding = request.headers.get('transfer-encoding', '').lower() - return request_encoding in cls.valid_transfer_encoding - - @classmethod - def is_valid_method(cls, request): - return request.method.upper() in cls.httpverb_may_have_body - - def has_body(self, request): - """Returns whether a Webob.Request object will possess an entity body. - - :param request: Webob.Request object - """ - - if self.is_valid_encoding(request) and self.is_valid_method(request): - request.is_body_readable = True - return True - - if request.content_length is not None and request.content_length > 0: - return True - return False - - @staticmethod - def _sanitizer(obj): - """Sanitizer method that will be passed to jsonutils.loads.""" - return obj - - def from_json(self, datastring): - try: - jsondata = jsonutils.loads(datastring, object_hook=self._sanitizer) - if not isinstance(jsondata, (dict, list)): - msg = _('Unexpected body type. 
Expected list/dict.') - raise webob.exc.HTTPBadRequest(explanation=msg) - return jsondata - except ValueError: - msg = _('Malformed JSON in request body.') - raise webob.exc.HTTPBadRequest(explanation=msg) - - def default(self, request): - if self.has_body(request): - return {'body': self.from_json(request.body)} - else: - return {} - - -class JSONResponseSerializer(object): - - def _sanitizer(self, obj): - """Sanitizer method that will be passed to jsonutils.dumps.""" - if hasattr(obj, "to_dict"): - return obj.to_dict() - if isinstance(obj, multidict.MultiDict): - return obj.mixed() - return jsonutils.to_primitive(obj) - - def to_json(self, data): - return jsonutils.dump_as_bytes(data, default=self._sanitizer) - - def default(self, response, result): - response.content_type = 'application/json' - body = self.to_json(result) - body = encodeutils.to_utf8(body) - response.body = body - - -def translate_exception(req, e): - """Translates all translatable elements of the given exception.""" - - # The RequestClass attribute in the webob.dec.wsgify decorator - # does not guarantee that the request object will be a particular - # type; this check is therefore necessary. - if not hasattr(req, "best_match_language"): - return e - - locale = req.best_match_language() - - if isinstance(e, webob.exc.HTTPError): - e.explanation = i18n.translate(e.explanation, locale) - e.detail = i18n.translate(e.detail, locale) - if getattr(e, 'body_template', None): - e.body_template = i18n.translate(e.body_template, locale) - return e - - -class Resource(object): - """WSGI app that handles (de)serialization and controller dispatch. - - Reads routing information supplied by RoutesMiddleware and calls - the requested action method upon its deserializer, controller, - and serializer. Those three objects may implement any of the basic - controller action methods (create, update, show, index, delete) - along with any that may be specified in the api router. A 'default' - method may also be implemented to be used in place of any - non-implemented actions. Deserializer methods must accept a request - argument and return a dictionary. Controller methods must accept a - request argument. Additionally, they must also accept keyword - arguments that represent the keys returned by the Deserializer. They - may raise a webob.exc exception or return a dict, which will be - serialized by requested content type. 
- """ - - def __init__(self, controller, deserializer=None, serializer=None): - """ - :param controller: object that implement methods created by routes lib - :param deserializer: object that supports webob request deserialization - through controller-like actions - :param serializer: object that supports webob response serialization - through controller-like actions - """ - self.controller = controller - self.serializer = serializer or JSONResponseSerializer() - self.deserializer = deserializer or JSONRequestDeserializer() - - @webob.dec.wsgify(RequestClass=Request) - def __call__(self, request): - """WSGI method that controls (de)serialization and method dispatch.""" - action_args = self.get_action_args(request.environ) - action = action_args.pop('action', None) - body_reject = strutils.bool_from_string( - action_args.pop('body_reject', None)) - - try: - if body_reject and self.deserializer.has_body(request): - msg = _('A body is not expected with this request.') - raise webob.exc.HTTPBadRequest(explanation=msg) - deserialized_request = self.dispatch(self.deserializer, - action, request) - action_args.update(deserialized_request) - action_result = self.dispatch(self.controller, action, - request, **action_args) - except webob.exc.WSGIHTTPException as e: - exc_info = sys.exc_info() - e = translate_exception(request, e) - six.reraise(type(e), e, exc_info[2]) - except glare_exc.GlareException: - raise - except UnicodeDecodeError: - msg = _("Error decoding your request. Either the URL or the " - "request body contained characters that could not be " - "decoded by Glare") - raise webob.exc.HTTPBadRequest(explanation=msg) - except Exception as e: - LOG.exception("Caught error: %s", - encodeutils.exception_to_unicode(e)) - response = webob.exc.HTTPInternalServerError(explanation=str(e)) - return response - - try: - response = webob.Response(request=request) - self.dispatch(self.serializer, action, response, action_result) - # encode all headers in response to utf-8 to prevent unicode errors - for name, value in list(response.headers.items()): - if six.PY2 and isinstance(value, six.text_type): - response.headers[name] = encodeutils.safe_encode(value) - return response - except webob.exc.WSGIHTTPException as e: - return translate_exception(request, e) - except webob.exc.HTTPException as e: - return e - except glare_exc.GlareException: - raise - # return unserializable result (typically a webob exc) - except Exception: - return action_result - - def dispatch(self, obj, action, *args, **kwargs): - """Find action-specific method on self and call it.""" - try: - method = getattr(obj, action) - except AttributeError: - method = getattr(obj, 'default') - - return method(*args, **kwargs) - - def get_action_args(self, request_environment): - """Parse dictionary created by routes library.""" - try: - args = request_environment['wsgiorg.routing_args'][1].copy() - except Exception: - return {} - - args.pop("controller", None) - args.pop("format", None) - - return args diff --git a/glare/db/__init__.py b/glare/db/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare/db/artifact_api.py b/glare/db/artifact_api.py deleted file mode 100644 index ec79434..0000000 --- a/glare/db/artifact_api.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Database API for all artifact types""" - -from oslo_db import exception as db_exception -from oslo_log import log as logging -from retrying import retry - -from glare.db.sqlalchemy import api -from glare import locking - -LOG = logging.getLogger(__name__) - - -def _retry_on_connection_error(exc): - """Function to retry a DB API call if connection error was received.""" - - if isinstance(exc, db_exception.DBConnectionError): - LOG.warning("Connection error detected. Retrying...") - return True - return False - - -class ArtifactAPI(object): - - def _serialize_values(self, values): - new_values = {} - if 'tags' in values: - new_values['tags'] = values.pop('tags') if values['tags'] else [] - for key, value in values.items(): - if key in api.BASE_ARTIFACT_PROPERTIES: - new_values[key] = value - else: - new_values.setdefault('properties', {})[key] = value - return new_values - - @retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000, - stop_max_attempt_number=20) - def save(self, context, artifact_id, values): - """Save artifact values in database - - :param artifact_id: id of artifact that needs to be updated - :param context: user context - :param values: values that needs to be updated - :return: dict of updated artifact values - """ - session = api.get_session() - return api.create_or_update( - context, artifact_id, self._serialize_values(values), session) - - @retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000, - stop_max_attempt_number=20) - def update_blob(self, context, artifact_id, values): - """Create and update blob records in db - - :param artifact_id: id of artifact that needs to be updated - :param context: user context - :param values: blob values that needs to be updated - :return: dict of updated artifact values - """ - session = api.get_session() - return api.create_or_update( - context, artifact_id, {'blobs': values}, session) - - @retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000, - stop_max_attempt_number=20) - def delete(self, context, artifact_id): - """Delete artifacts from db - - :param context: user context - :param artifact_id: id of artifact that needs to be deleted - """ - session = api.get_session() - api.delete(context, artifact_id, session) - - @retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000, - stop_max_attempt_number=20) - def get(self, context, artifact_id): - """Return artifact values from database - - :param context: user context - :param artifact_id: id of the artifact - :return: dict of artifact values - """ - session = api.get_session() - return api.get(context, artifact_id, session) - - @retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000, - stop_max_attempt_number=20) - def list(self, context, filters, marker, limit, sort, latest): - """List artifacts from db - - :param context: user request context - :param filters: filter conditions from url - :param marker: id of first artifact where we need to start - artifact lookup - :param limit: max number of items in list - :param sort: sort conditions - :param latest: flag that indicates, that only artifacts 
with highest - versions should be returned in output - :return: list of artifacts. Each artifact is represented as dict of - values. - """ - session = api.get_session() - return api.get_all(context=context, session=session, filters=filters, - marker=marker, limit=limit, sort=sort, - latest=latest) - - -class ArtifactLockApi(locking.LockApiBase): - @retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000, - stop_max_attempt_number=20) - def create_lock(self, context, lock_key): - session = api.get_session() - return api.create_lock(context, lock_key, session) - - @retry(retry_on_exception=_retry_on_connection_error, wait_fixed=1000, - stop_max_attempt_number=20) - def delete_lock(self, context, lock_id): - session = api.get_session() - api.delete_lock(context, lock_id, session) diff --git a/glare/db/migration/__init__.py b/glare/db/migration/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare/db/migration/alembic.ini b/glare/db/migration/alembic.ini deleted file mode 100644 index dc70416..0000000 --- a/glare/db/migration/alembic.ini +++ /dev/null @@ -1,54 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = glare/db/migration/alembic_migrations - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# max length of characters to apply to the -# "slug" field -#truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -sqlalchemy.url = - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S \ No newline at end of file diff --git a/glare/db/migration/alembic_migrations/README b/glare/db/migration/alembic_migrations/README deleted file mode 100644 index 9d0636b..0000000 --- a/glare/db/migration/alembic_migrations/README +++ /dev/null @@ -1,15 +0,0 @@ -Please see https://alembic.readthedocs.org/en/latest/index.html for general documentation - -To create alembic migrations use: -$ glare-db-manage revision --message --autogenerate - -Stamp db with most recent migration version, without actually running migrations -$ glare-db-manage stamp --revision head - -Upgrade can be performed by: -$ glare-db-manage upgrade -$ glare-db-manage upgrade --revision head - -Downgrading db: -$ glare-db-manage downgrade -$ glare-db-manage downgrade --revision base diff --git a/glare/db/migration/alembic_migrations/env.py b/glare/db/migration/alembic_migrations/env.py deleted file mode 100644 index 1adc42a..0000000 --- a/glare/db/migration/alembic_migrations/env.py +++ /dev/null @@ -1,45 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from alembic import context - -from glare.db.sqlalchemy import api -from glare.db.sqlalchemy import models - - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -target_metadata = models.BASE.metadata - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - engine = api.get_engine() - - with engine.connect() as connection: - context.configure(connection=connection, - target_metadata=target_metadata) - with context.begin_transaction(): - context.run_migrations() - - -run_migrations_online() diff --git a/glare/db/migration/alembic_migrations/script.py.mako b/glare/db/migration/alembic_migrations/script.py.mako deleted file mode 100644 index 4a23e95..0000000 --- a/glare/db/migration/alembic_migrations/script.py.mako +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright ${create_date.year} OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision} -Create Date: ${create_date} - -""" - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} \ No newline at end of file diff --git a/glare/db/migration/alembic_migrations/versions/001_initial_version.py b/glare/db/migration/alembic_migrations/versions/001_initial_version.py deleted file mode 100644 index 4936dbe..0000000 --- a/glare/db/migration/alembic_migrations/versions/001_initial_version.py +++ /dev/null @@ -1,167 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Initial version - -Revision ID: 001 -Revises: None -Create Date: 2016-08-18 12:28:37.372366 - -""" - -# revision identifiers, used by Alembic. -revision = '001' -down_revision = None - -from alembic import op -import sqlalchemy as sa - - -MYSQL_ENGINE = 'InnoDB' -MYSQL_CHARSET = 'utf8' - - -def upgrade(): - op.create_table( - 'glare_artifacts', - sa.Column('id', sa.String(36), primary_key=True, nullable=False), - sa.Column('name', sa.String(255), nullable=False), - sa.Column('type_name', sa.String(255), nullable=False), - sa.Column('version_prefix', sa.BigInteger(), nullable=False), - sa.Column('version_suffix', sa.String(255)), - sa.Column('version_meta', sa.String(255)), - sa.Column('description', sa.Text()), - sa.Column('visibility', sa.String(32), nullable=False), - sa.Column('status', sa.String(32), nullable=False), - sa.Column('owner', sa.String(255)), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('activated_at', sa.DateTime()), - sa.PrimaryKeyConstraint('id'), - mysql_engine=MYSQL_ENGINE, - mysql_charset=MYSQL_CHARSET - ) - - op.create_index('ix_glare_artifact_name_and_version', - 'glare_artifacts', - ['name', 'version_prefix', 'version_suffix'] - ) - op.create_index('ix_glare_artifact_type', - 'glare_artifacts', - ['type_name'] - ) - op.create_index('ix_glare_artifact_status', - 'glare_artifacts', - ['status'] - ) - op.create_index('ix_glare_artifact_owner', - 'glare_artifacts', - ['owner'] - ) - op.create_index('ix_glare_artifact_visibility', - 'glare_artifacts', - ['visibility'] - ) - - op.create_table( - 'glare_artifact_tags', - sa.Column('id', sa.String(36), primary_key=True, nullable=False), - sa.Column('artifact_id', sa.String(36), - sa.ForeignKey('glare_artifacts.id'), nullable=False), - sa.Column('value', sa.String(255), nullable=False), - sa.PrimaryKeyConstraint('id'), - mysql_engine=MYSQL_ENGINE, - mysql_charset=MYSQL_CHARSET - ) - - op.create_index('ix_glare_artifact_tags_artifact_id', - 'glare_artifact_tags', - ['artifact_id'] - ) - op.create_index('ix_glare_artifact_tags_artifact_id_tag_value', - 'glare_artifact_tags', - ['artifact_id', 'value'] - ) - - op.create_table( - 'glare_artifact_blobs', - sa.Column('id', sa.String(36), primary_key=True, nullable=False), - sa.Column('artifact_id', sa.String(36), - sa.ForeignKey('glare_artifacts.id'), nullable=False), - sa.Column('size', sa.BigInteger()), - sa.Column('md5', sa.String(32)), - sa.Column('sha1', sa.String(40)), - sa.Column('sha256', sa.String(64)), - sa.Column('name', sa.String(255), nullable=False), - sa.Column('status', sa.String(32), nullable=False), - sa.Column('external', sa.Boolean()), - sa.Column('url', sa.Text()), - sa.Column('key_name', sa.String(255)), - sa.Column('content_type', sa.String(255)), - sa.PrimaryKeyConstraint('id'), - mysql_engine=MYSQL_ENGINE, - mysql_charset=MYSQL_CHARSET - ) - - op.create_index('ix_glare_artifact_blobs_artifact_id', - 'glare_artifact_blobs', - ['artifact_id'] - ) - op.create_index('ix_glare_artifact_blobs_name', - 'glare_artifact_blobs', - ['name'] - ) - - op.create_table( - 'glare_artifact_properties', - sa.Column('id', sa.String(36), primary_key=True, nullable=False), - sa.Column('artifact_id', sa.String(36), - sa.ForeignKey('glare_artifacts.id'), nullable=False), - sa.Column('name', sa.String(255), nullable=False), - sa.Column('string_value', sa.String(20000)), - sa.Column('int_value', sa.Integer()), - sa.Column('numeric_value', sa.Numeric()), - sa.Column('bool_value', sa.Boolean()), 
- sa.Column('position', sa.Integer()), - sa.Column('key_name', sa.String(255)), - sa.PrimaryKeyConstraint('id'), - mysql_engine=MYSQL_ENGINE, - mysql_charset=MYSQL_CHARSET - ) - - op.create_index('ix_glare_artifact_properties_artifact_id', - 'glare_artifact_properties', - ['artifact_id'] - ) - op.create_index('ix_glare_artifact_properties_name', - 'glare_artifact_properties', - ['name'] - ) - - op.create_table( - 'glare_artifact_locks', - sa.Column('id', sa.String(255), primary_key=True, nullable=False), - sa.PrimaryKeyConstraint('id'), - mysql_engine=MYSQL_ENGINE, - mysql_charset=MYSQL_CHARSET - ) - - -def downgrade(): - op.drop_table('glare_artifact_locks') - op.drop_table('glare_artifact_properties') - op.drop_table('glare_artifact_blobs') - op.drop_table('glare_artifact_tags') - op.drop_table('glare_artifacts') - - # end Alembic commands # diff --git a/glare/db/migration/alembic_migrations/versions/002_add_acquired_at_column.py b/glare/db/migration/alembic_migrations/versions/002_add_acquired_at_column.py deleted file mode 100644 index 7e42301..0000000 --- a/glare/db/migration/alembic_migrations/versions/002_add_acquired_at_column.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2016 OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Add acquired_at column - -Revision ID: 002 -Revises: 001 -Create Date: 2016-10-05 16:03:43.207147 - -""" - -# revision identifiers, used by Alembic. -revision = '002' -down_revision = '001' - -from alembic import op -import sqlalchemy as sa - -MYSQL_ENGINE = 'InnoDB' -MYSQL_CHARSET = 'utf8' - - -def upgrade(): - op.drop_table('glare_artifact_locks') - - op.create_table( - 'glare_artifact_locks', - sa.Column('id', sa.String(255), primary_key=True, nullable=False), - sa.Column('acquired_at', sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint('id'), - mysql_engine=MYSQL_ENGINE, - mysql_charset=MYSQL_CHARSET - ) - - -def downgrade(): - op.drop_table('glare_artifact_locks') - - op.create_table( - 'glare_artifact_locks', - sa.Column('id', sa.String(255), primary_key=True, nullable=False), - sa.PrimaryKeyConstraint('id'), - mysql_engine=MYSQL_ENGINE, - mysql_charset=MYSQL_CHARSET - ) diff --git a/glare/db/migration/alembic_migrations/versions/003_add_database_blob_storage.py b/glare/db/migration/alembic_migrations/versions/003_add_database_blob_storage.py deleted file mode 100644 index 8af24f2..0000000 --- a/glare/db/migration/alembic_migrations/versions/003_add_database_blob_storage.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2017 OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Add database blob storage
-
-Revision ID: 003
-Revises: 002
-Create Date: 2017-01-10 12:53:25.108149
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '003'
-down_revision = '002'
-
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import mysql
-
-MYSQL_ENGINE = 'InnoDB'
-MYSQL_CHARSET = 'utf8'
-
-
-def upgrade():
-    op.create_table(
-        'glare_blob_data',
-        sa.Column('id', sa.String(255), primary_key=True, nullable=False),
-        # Because of MySQL's strange behavior, LargeBinary is converted to
-        # BLOB instead of LONGBLOB, so we have to fix it explicitly with a
-        # 'with_variant' call.
-        sa.Column(
-            'data',
-            sa.LargeBinary().with_variant(mysql.LONGBLOB(), 'mysql'),
-            nullable=False),
-        sa.PrimaryKeyConstraint('id'),
-        mysql_engine=MYSQL_ENGINE,
-        mysql_charset=MYSQL_CHARSET
-    )
-
-
-def downgrade():
-    op.drop_table('glare_blob_data')
diff --git a/glare/db/migration/migration.py b/glare/db/migration/migration.py
deleted file mode 100644
index 3befd2e..0000000
--- a/glare/db/migration/migration.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import os
-
-import alembic
-from alembic import config as alembic_config
-from alembic import migration as alembic_migration
-
-from glare.db.sqlalchemy import api as db_api
-
-
-def get_alembic_config():
-    path = os.path.join(os.path.dirname(__file__), 'alembic.ini')
-
-    config = alembic_config.Config(path)
-    config.set_main_option('script_location',
-                           'glare.db.migration:alembic_migrations')
-    return config
-
-
-def version(engine=None):
-    """Returns current database version."""
-    engine = engine or db_api.get_engine()
-    with engine.connect() as conn:
-        context = alembic_migration.MigrationContext.configure(conn)
-        return context.get_current_revision()
-
-
-def upgrade(revision, config=None):
-    """Used for upgrading database.
-
-    :param revision: Desired database version
-    :type revision: string
-    """
-    revision = revision or 'head'
-    config = config or get_alembic_config()
-
-    alembic.command.upgrade(config, revision)
-
-
-def downgrade(revision, config=None):
-    """Used for downgrading database.
-
-    :param revision: Desired database version
-    :type revision: string
-    """
-    revision = revision or 'base'
-    config = config or get_alembic_config()
-    return alembic.command.downgrade(config, revision)
-
-
-def stamp(revision, config=None):
-    """Stamps database with provided revision.
-
-    Doesn't run any migrations.
-
-    :param revision: Should match one from the repository, or 'head' to
-                     stamp the database with the most recent revision
-    :type revision: string
-    """
-    config = config or get_alembic_config()
-    return alembic.command.stamp(config, revision=revision)
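
Taken together, a short usage sketch of these helpers (assuming a configured database connection; `revision()` follows below):

    from glare.db.migration import migration

    migration.upgrade('head')      # apply all migrations (001 through 003)
    current = migration.version()  # -> '003'
    migration.stamp('002')         # record revision 002 without running
                                   # any migrations
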
- - :param message: Text that will be used for migration title - :type message: string - :param autogenerate: If True - generates diff based on current database - state - :type autogenerate: bool - """ - config = config or get_alembic_config() - return alembic.command.revision(config, message=message, - autogenerate=autogenerate) diff --git a/glare/db/sqlalchemy/__init__.py b/glare/db/sqlalchemy/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare/db/sqlalchemy/api.py b/glare/db/sqlalchemy/api.py deleted file mode 100644 index 0dca5ba..0000000 --- a/glare/db/sqlalchemy/api.py +++ /dev/null @@ -1,671 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import operator -import threading - -from oslo_config import cfg -from oslo_db import exception as db_exception -from oslo_db import options -from oslo_db.sqlalchemy import session -from oslo_log import log as os_logging -from oslo_utils import timeutils -import osprofiler.sqlalchemy -from retrying import retry -import six -import sqlalchemy -from sqlalchemy import and_ -import sqlalchemy.exc -from sqlalchemy import exists -from sqlalchemy import func -from sqlalchemy import or_ -import sqlalchemy.orm as orm -from sqlalchemy.orm import aliased -from sqlalchemy.orm import joinedload - -from glare.common import exception -from glare.common import semver_db -from glare.common import utils -from glare.db.sqlalchemy import models -from glare.i18n import _ - -LOG = os_logging.getLogger(__name__) - -CONF = cfg.CONF -CONF.import_group("profiler", "glare.common.wsgi") -options.set_defaults(CONF) - - -BASE_ARTIFACT_PROPERTIES = ('id', 'visibility', 'created_at', 'updated_at', - 'activated_at', 'owner', 'status', 'description', - 'name', 'type_name', 'version') - -_FACADE = None -_LOCK = threading.Lock() - - -def _retry_on_deadlock(exc): - """Decorator to retry a DB API call if Deadlock was received.""" - - if isinstance(exc, db_exception.DBDeadlock): - LOG.warning("Deadlock detected. 
Retrying...") - return True - return False - - -def _create_facade_lazily(): - global _LOCK, _FACADE - if _FACADE is None: - with _LOCK: - if _FACADE is None: - _FACADE = session.EngineFacade.from_config(CONF) - - if CONF.profiler.enabled and CONF.profiler.trace_sqlalchemy: - osprofiler.sqlalchemy.add_tracing(sqlalchemy, - _FACADE.get_engine(), - "db") - return _FACADE - - -def get_engine(): - facade = _create_facade_lazily() - return facade.get_engine() - - -def get_session(autocommit=True, expire_on_commit=False): - facade = _create_facade_lazily() - return facade.get_session(autocommit=autocommit, - expire_on_commit=expire_on_commit) - - -def setup_db(): - engine = get_engine() - models.register_models(engine) - - -def drop_db(): - engine = get_engine() - models.unregister_models(engine) - - -@retry(retry_on_exception=_retry_on_deadlock, wait_fixed=500, - stop_max_attempt_number=50) -def delete(context, artifact_id, session): - with session.begin(): - session.query(models.Artifact).filter_by(id=artifact_id).delete() - - -def _drop_protected_attrs(model_class, values): - """Removed protected attributes from values dictionary using the models - __protected_attributes__ field. - """ - for attr in model_class.__protected_attributes__: - if attr in values: - del values[attr] - - -@retry(retry_on_exception=_retry_on_deadlock, wait_fixed=500, - stop_max_attempt_number=50) -@utils.no_4byte_params -def create_or_update(context, artifact_id, values, session): - with session.begin(): - _drop_protected_attrs(models.Artifact, values) - if artifact_id is None: - # create new artifact - artifact = models.Artifact() - artifact.id = values.pop('id') - else: - # update the existing artifact - artifact = _get(context, artifact_id, session) - - if 'version' in values: - values['version'] = semver_db.parse(values['version']) - - if 'tags' in values: - tags = values.pop('tags') - artifact.tags = _do_tags(artifact, tags) - - if 'properties' in values: - properties = values.pop('properties', {}) - artifact.properties = _do_properties(artifact, properties) - - if 'blobs' in values: - blobs = values.pop('blobs') - artifact.blobs = _do_blobs(artifact, blobs) - - artifact.updated_at = timeutils.utcnow() - if 'status' in values: - if session.query(exists().where(and_( - models.ArtifactBlob.status == 'saving', - models.ArtifactBlob.artifact_id == artifact_id)) - ).one()[0]: - raise exception.Conflict( - "You cannot change artifact status if it has " - "uploading blobs.") - if values['status'] == 'active': - artifact.activated_at = timeutils.utcnow() - artifact.update(values) - - LOG.debug('Sending request to the database. ' - 'New values are %s', values) - artifact.save(session=session) - LOG.debug('Response from the database was received.') - - return artifact.to_dict() - - -def _get(context, artifact_id, session): - try: - query = _do_artifacts_query(context, session).filter_by( - id=artifact_id) - artifact = query.one() - except orm.exc.NoResultFound: - msg = _("Artifact with id=%s not found.") % artifact_id - LOG.warning(msg) - raise exception.ArtifactNotFound(msg) - return artifact - - -def get(context, artifact_id, session): - return _get(context, artifact_id, session).to_dict() - - -def get_all(context, session, filters=None, marker=None, limit=None, - sort=None, latest=False): - """List all visible artifacts - - :param filters: dict of filter keys and values. 
- :param marker: artifact id after which to start the page - :param limit: maximum number of artifacts to return - :param sort: a tuple (key, dir, type), where key is the attribute by - which results should be sorted, dir is the direction ('asc' or 'desc'), - and type is the attribute's type ('bool', 'string', 'numeric' or 'int'), - or None if the attribute is a base property. - :param latest: flag indicating that only artifacts with the highest - versions should be returned in the output - """ - artifacts = _get_all( - context, session, filters, marker, limit, sort, latest) - return [af.to_dict() for af in artifacts] - - -def _apply_latest_filter(context, session, query, - basic_conds, tag_conds, prop_conds): - # Subquery to fetch max version suffix for a group (name, - # version_prefix) - ver_suffix_subq = _apply_query_base_filters( - session.query( - models.Artifact.name, - models.Artifact.version_prefix, - func.max(models.Artifact.version_suffix).label( - 'max_suffix')).group_by( - models.Artifact.name, models.Artifact.version_prefix), - context) - ver_suffix_subq = _apply_user_filters( - ver_suffix_subq, basic_conds, tag_conds, prop_conds).subquery() - # Subquery to fetch max version prefix for a name group - ver_prefix_subq = _apply_query_base_filters( - session.query(models.Artifact.name, func.max( - models.Artifact.version_prefix).label('max_prefix')).group_by( - models.Artifact.name), - context) - ver_prefix_subq = _apply_user_filters( - ver_prefix_subq, basic_conds, tag_conds, prop_conds).subquery() - # Combine the two subqueries by joining them with the Artifact table - query = query.join( - ver_prefix_subq, - and_(models.Artifact.name == ver_prefix_subq.c.name, - models.Artifact.version_prefix == - ver_prefix_subq.c.max_prefix)).join( - ver_suffix_subq, - and_(models.Artifact.name == ver_suffix_subq.c.name, - models.Artifact.version_prefix == - ver_suffix_subq.c.version_prefix, - models.Artifact.version_suffix == - ver_suffix_subq.c.max_suffix) - ) - - return query - - -def _apply_user_filters(query, basic_conds, tag_conds, prop_conds): - - if basic_conds: - for basic_condition in basic_conds: - query = query.filter(and_(*basic_condition)) - - if tag_conds: - for tag_condition in tag_conds: - query = query.join(models.ArtifactTag, aliased=True).filter( - and_(*tag_condition)) - - if prop_conds: - for prop_condition in prop_conds: - query = query.join(models.ArtifactProperty, aliased=True).filter( - and_(*prop_condition)) - - return query - - -def _get_all(context, session, filters=None, marker=None, limit=None, - sort=None, latest=False): - - filters = filters or {} - - query = _do_artifacts_query(context, session) - - basic_conds, tag_conds, prop_conds = _do_query_filters(filters) - - query = _apply_user_filters(query, basic_conds, tag_conds, prop_conds) - - if latest: - query = _apply_latest_filter(context, session, query, - basic_conds, tag_conds, prop_conds) - - marker_artifact = None - if marker is not None: - marker_artifact = get(context, marker, session) - - query = _do_paginate_query(query=query, limit=limit, - marker=marker_artifact, sort=sort) - - return query.all() - - -def _do_paginate_query(query, marker=None, limit=None, sort=None): - # Add sorting - number_of_custom_props = 0 - for sort_key, sort_dir, sort_type in sort: - try: - sort_dir_func = { - 'asc': sqlalchemy.asc, - 'desc': sqlalchemy.desc, - }[sort_dir] - except KeyError: - msg = _("Unknown sort direction, must be 'desc' or 'asc'.") - raise exception.BadRequest(msg) - # Note(mfedosin): Workaround to deal with the fact that 
sqlalchemy - # cannot handle composite keys correctly - if sort_key == 'version': - query = query.order_by(sort_dir_func(models.Artifact.version_prefix))\ - .order_by(sort_dir_func(models.Artifact.version_suffix))\ - .order_by(sort_dir_func(models.Artifact.version_meta)) - elif sort_key in BASE_ARTIFACT_PROPERTIES: - # sort by generic property - query = query.order_by(sort_dir_func(getattr(models.Artifact, - sort_key))) - else: - # sort by custom property - number_of_custom_props += 1 - if number_of_custom_props > 1: - msg = _("For performance's sake, sorting by more than one " - "custom property is not allowed with this db backend.") - raise exception.BadRequest(msg) - prop_table = aliased(models.ArtifactProperty) - query = ( - query.join(prop_table). - filter(prop_table.name == sort_key). - order_by(sort_dir_func(getattr(prop_table, - sort_type + '_value')))) - - # Add pagination - if marker is not None: - marker_values = [] - for sort_key, __, __ in sort: - v = marker.get(sort_key, None) - marker_values.append(v) - - # Build up an array of sort criteria as in the docstring - criteria_list = [] - for i in range(len(sort)): - crit_attrs = [] - for j in range(i): - value = marker_values[j] - if sort[j][0] in BASE_ARTIFACT_PROPERTIES: - if sort[j][0] == 'version': - value = semver_db.parse(value) - crit_attrs.append([getattr(models.Artifact, sort[j][0]) == - value]) - else: - conds = [models.ArtifactProperty.name == sort[j][0]] - conds.extend([getattr(models.ArtifactProperty, - sort[j][2] + '_value') == value]) - crit_attrs.append(conds) - - value = marker_values[i] - sort_dir_func = operator.gt if sort[i][1] == 'asc' else operator.lt - if sort[i][0] in BASE_ARTIFACT_PROPERTIES: - if sort[i][0] == 'version': - value = semver_db.parse(value) - crit_attrs.append([sort_dir_func(getattr(models.Artifact, - sort[i][0]), value)]) - else: - query = query.join(models.ArtifactProperty, aliased=True) - conds = [models.ArtifactProperty.name == sort[i][0]] - conds.extend([sort_dir_func(getattr(models.ArtifactProperty, - sort[i][2] + '_value'), value)]) - crit_attrs.append(conds) - - criteria = [and_(*crit_attr) for crit_attr in crit_attrs] - criteria_list.append(criteria) - - criteria_list = [and_(*cr) for cr in criteria_list] - query = query.filter(or_(*criteria_list)) - - if limit is not None: - query = query.limit(limit) - - return query - - -def _do_artifacts_query(context, session): - """Build the query to get all artifacts based on the context.""" - - query = session.query(models.Artifact) - - query = (query.options(joinedload(models.Artifact.properties)). - options(joinedload(models.Artifact.tags)). - options(joinedload(models.Artifact.blobs))) - - return _apply_query_base_filters(query, context) - - -def _apply_query_base_filters(query, context): - # If admin, return everything. - if context.is_admin: - return query - - # If anonymous user, return only public artifacts. - # However, if context.tenant has a value, return both - # public and private artifacts of the owner. 
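# Sketch of the resulting WHERE clause for a tenant-scoped request
# (illustrative only): owner = :tenant OR visibility = 'public'.
# Anonymous requests fall through to visibility = 'public' alone.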
- if context.tenant is not None: - query = query.filter( - or_(models.Artifact.owner == context.tenant, - models.Artifact.visibility == 'public')) - else: - query = query.filter( - models.Artifact.visibility == 'public') - - return query - -op_mappings = { - 'eq': operator.eq, - 'gt': operator.gt, - 'gte': operator.ge, - 'lt': operator.lt, - 'lte': operator.le, - 'neq': operator.ne, -} - - -def _do_query_filters(filters): - basic_conds = [] - tag_conds = [] - prop_conds = [] - for field_name, key_name, op, field_type, value in filters: - if field_name == 'tags': - tags = utils.split_filter_value_for_quotes(value) - for tag in tags: - tag_conds.append([models.ArtifactTag.value == tag]) - elif field_name == 'tags-any': - tags = utils.split_filter_value_for_quotes(value) - tag_conds.append([models.ArtifactTag.value.in_(tags)]) - elif field_name in BASE_ARTIFACT_PROPERTIES: - if op != 'in': - fn = op_mappings[op] - if field_name == 'version': - value = semver_db.parse(value) - basic_conds.append([fn(getattr(models.Artifact, field_name), - value)]) - else: - if field_name == 'version': - value = [semver_db.parse(val) for val in value] - basic_conds.append( - [or_(*[ - models.Artifact.version == ver for ver in value])]) - else: - basic_conds.append( - [getattr(models.Artifact, field_name).in_(value)]) - else: - conds = [models.ArtifactProperty.name == field_name] - if key_name is not None: - if op == 'eq' or value is not None: - conds.extend( - [models.ArtifactProperty.key_name == key_name]) - elif op == 'in': - conds.extend( - [models.ArtifactProperty.key_name.in_(key_name)]) - if value is not None: - if op != 'in': - fn = op_mappings[op] - conds.extend([fn(getattr(models.ArtifactProperty, - field_type + '_value'), value)]) - else: - conds.extend([getattr(models.ArtifactProperty, - field_type + '_value').in_(value)]) - - prop_conds.append(conds) - - return basic_conds, tag_conds, prop_conds - - -def _do_tags(artifact, new_tags): - tags_to_update = [] - # don't touch existing tags - for tag in artifact.tags: - if tag.value in new_tags: - tags_to_update.append(tag) - new_tags.remove(tag.value) - # add new tags - for tag in new_tags: - db_tag = models.ArtifactTag() - db_tag.value = tag - tags_to_update.append(db_tag) - return tags_to_update - - -def _get_prop_type(value): - if isinstance(value, bool): - return 'bool_value' - if isinstance(value, int): - return 'int_value' - if isinstance(value, six.string_types): - return 'string_value' - if isinstance(value, float): - return 'numeric_value' - - -def _create_property(prop_name, prop_value, position=None, key_name=None): - db_prop = models.ArtifactProperty() - db_prop.name = prop_name - setattr(db_prop, _get_prop_type(prop_value), prop_value) - db_prop.position = position - db_prop.key_name = key_name - return db_prop - - -def _do_properties(artifact, new_properties): - props_to_update = [] - # don't touch the existing properties - for prop in artifact.properties: - if prop.name not in new_properties: - props_to_update.append(prop) - - for prop_name, prop_value in new_properties.items(): - if prop_value is None: - continue - if isinstance(prop_value, list): - for pos, list_prop in enumerate(prop_value): - for prop in artifact.properties: - if prop.name == prop_name and pos == prop.position: - if getattr(prop, _get_prop_type( - list_prop)) != list_prop: - setattr(prop, _get_prop_type(list_prop), - list_prop) - props_to_update.append(prop) - break - else: - props_to_update.append( - _create_property(prop_name, list_prop, position=pos) - ) - elif 
isinstance(prop_value, dict): - for dict_key, dict_val in prop_value.items(): - for prop in artifact.properties: - if prop.name == prop_name and prop.key_name == dict_key: - if getattr(prop, _get_prop_type(dict_val)) != dict_val: - setattr(prop, _get_prop_type(dict_val), dict_val) - props_to_update.append(prop) - break - else: - props_to_update.append( - _create_property(prop_name, dict_val, - key_name=dict_key) - ) - elif prop_value is not None: - for prop in artifact.properties: - if prop.name == prop_name: - setattr(prop, _get_prop_type(prop_value), prop_value) - props_to_update.append(prop) - break - else: - props_to_update.append(_create_property( - prop_name, prop_value)) - - return props_to_update - - -def _update_blob_values(blob, values): - for elem in ('size', 'md5', 'sha1', 'sha256', 'url', 'external', 'status', - 'content_type'): - setattr(blob, elem, values[elem]) - return blob - - -def _do_blobs(artifact, new_blobs): - blobs_to_update = [] - # don't touch the existing blobs - for blob in artifact.blobs: - if blob.name not in new_blobs: - blobs_to_update.append(blob) - - for blob_name, blob_value in new_blobs.items(): - if blob_value is None: - continue - if isinstance(blob_value.get('status'), str): - for blob in artifact.blobs: - if blob.name == blob_name: - _update_blob_values(blob, blob_value) - blobs_to_update.append(blob) - break - else: - blob = models.ArtifactBlob() - blob.name = blob_name - _update_blob_values(blob, blob_value) - blobs_to_update.append(blob) - else: - for dict_key, dict_val in blob_value.items(): - for blob in artifact.blobs: - if blob.name == blob_name and blob.key_name == dict_key: - _update_blob_values(blob, dict_val) - blobs_to_update.append(blob) - break - else: - blob = models.ArtifactBlob() - blob.name = blob_name - blob.key_name = dict_key - _update_blob_values(blob, dict_val) - blobs_to_update.append(blob) - - return blobs_to_update - - -@retry(retry_on_exception=_retry_on_deadlock, wait_fixed=500, - stop_max_attempt_number=50) -@utils.no_4byte_params -def create_lock(context, lock_key, session): - """Try to create lock record.""" - with session.begin(): - existing = session.query(models.ArtifactLock).get(lock_key) - if existing is None: - try: - lock = models.ArtifactLock() - lock.id = lock_key - lock.save(session=session) - return lock.id - except (sqlalchemy.exc.IntegrityError, - db_exception.DBDuplicateEntry): - msg = _("Cannot lock an item with key %s. " - "Lock already acquired by other request") % lock_key - raise exception.Conflict(msg) - else: - if timeutils.is_older_than(existing.acquired_at, 5): - existing.acquired_at = timeutils.utcnow() - existing.save(session) - return existing.id - else: - msg = _("Cannot lock an item with key %s. 
" - "Lock already acquired by other request") % lock_key - raise exception.Conflict(msg) - - -@retry(retry_on_exception=_retry_on_deadlock, wait_fixed=500, - stop_max_attempt_number=50) -def delete_lock(context, lock_id, session): - with session.begin(): - try: - session.query(models.ArtifactLock).filter_by(id=lock_id).delete() - except orm.exc.NoResultFound: - msg = _("Cannot delete a lock with id %s.") % lock_id - raise exception.NotFound(msg) - - -@retry(retry_on_exception=_retry_on_deadlock, wait_fixed=500, - stop_max_attempt_number=50) -def save_blob_data(context, blob_data_id, data, session): - """Save blob data to database.""" - with session.begin(): - blob_data = models.ArtifactBlobData() - blob_data.id = blob_data_id - blob_data.data = data.read() - blob_data.save(session=session) - return "sql://" + blob_data.id - - -@retry(retry_on_exception=_retry_on_deadlock, wait_fixed=500, - stop_max_attempt_number=50) -def get_blob_data(context, uri, session): - """Download blob data from database.""" - - blob_data_id = uri[6:] - try: - blob_data = session.query( - models.ArtifactBlobData).filter_by(id=blob_data_id).one() - except orm.exc.NoResultFound: - msg = _("Cannot find a blob data with id %s.") % blob_data_id - raise exception.NotFound(msg) - return blob_data.data - - -@retry(retry_on_exception=_retry_on_deadlock, wait_fixed=500, - stop_max_attempt_number=50) -def delete_blob_data(context, uri, session): - """Delete blob data from database.""" - with session.begin(): - blob_data_id = uri[6:] - try: - session.query( - models.ArtifactBlobData).filter_by(id=blob_data_id).delete() - except orm.exc.NoResultFound: - msg = _("Cannot delete a blob data with id %s.") % blob_data_id - raise exception.NotFound(msg) diff --git a/glare/db/sqlalchemy/models.py b/glare/db/sqlalchemy/models.py deleted file mode 100644 index 70280e0..0000000 --- a/glare/db/sqlalchemy/models.py +++ /dev/null @@ -1,271 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -from oslo_db.sqlalchemy import models -from oslo_utils import timeutils -from oslo_utils import uuidutils -from sqlalchemy import BigInteger -from sqlalchemy import Boolean -from sqlalchemy import Column -from sqlalchemy import DateTime -from sqlalchemy.ext import declarative -from sqlalchemy import ForeignKey -from sqlalchemy import Index -from sqlalchemy import Integer -from sqlalchemy import LargeBinary -from sqlalchemy import Numeric -from sqlalchemy.orm import backref -from sqlalchemy.orm import composite -from sqlalchemy.orm import relationship -from sqlalchemy import String -from sqlalchemy import Text - -from glare.common import semver_db - -BASE = declarative.declarative_base() - - -class ArtifactBase(models.ModelBase): - """Base class for Artifact Models.""" - - __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'} - __table_initialized__ = False - - def save(self, session=None): - from glare.db.sqlalchemy import api as db_api - - super(ArtifactBase, self).save(session or db_api.get_session()) - - def keys(self): - return self.__dict__.keys() - - def values(self): - return self.__dict__.values() - - def items(self): - return self.__dict__.items() - - def to_dict(self): - d = {} - for c in self.__table__.columns: - d[c.name] = self[c.name] - return d - - -def _parse_property_value(prop): - columns = [ - 'int_value', - 'string_value', - 'bool_value', - 'numeric_value'] - - for prop_type in columns: - if getattr(prop, prop_type) is not None: - return getattr(prop, prop_type) - - -def _parse_blob_value(blob): - return { - "id": blob.id, - "url": blob.url, - "status": blob.status, - "external": blob.external, - "md5": blob.md5, - "sha1": blob.sha1, - "sha256": blob.sha256, - "size": blob.size, - "content_type": blob.content_type - } - - -class Artifact(BASE, ArtifactBase): - __tablename__ = 'glare_artifacts' - __table_args__ = ( - Index('ix_glare_artifact_name_and_version', 'name', 'version_prefix', - 'version_suffix'), - Index('ix_glare_artifact_type', 'type_name'), - Index('ix_glare_artifact_status', 'status'), - Index('ix_glare_artifact_owner', 'owner'), - Index('ix_glare_artifact_visibility', 'visibility'), - {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}) - __protected_attributes__ = set(["created_at", "updated_at"]) - - id = Column(String(36), primary_key=True, - default=lambda: uuidutils.generate_uuid()) - name = Column(String(255), nullable=False) - type_name = Column(String(255), nullable=False) - version_prefix = Column(BigInteger().with_variant(Integer, "sqlite"), - nullable=False) - version_suffix = Column(String(255)) - version_meta = Column(String(255)) - version = composite(semver_db.DBVersion, version_prefix, - version_suffix, version_meta, - comparator_factory=semver_db.VersionComparator) - description = Column(Text()) - visibility = Column(String(32), nullable=False) - status = Column(String(32), nullable=False) - owner = Column(String(255)) - created_at = Column(DateTime, default=lambda: timeutils.utcnow(), - nullable=False) - updated_at = Column(DateTime, default=lambda: timeutils.utcnow(), - nullable=False, onupdate=lambda: timeutils.utcnow()) - activated_at = Column(DateTime) - - def to_dict(self): - d = super(Artifact, self).to_dict() - - d.pop('version_prefix') - d.pop('version_suffix') - d.pop('version_meta') - d['version'] = str(self.version) - - # parse tags - tags = [] - for tag in self.tags: - tags.append(tag.value) - d['tags'] = tags - - # parse properties - for prop in self.properties: - prop_value = _parse_property_value(prop) - 
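# Reassembly rules (descriptive note): rows with a position rebuild
# list fields, rows with a key_name rebuild dict fields, and the
# remaining rows become plain scalar fields.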
- if prop.position is not None: - if prop.name not in d: - # create new list - d[prop.name] = [] - # insert value in position - d[prop.name].insert(prop.position, prop_value) - elif prop.key_name is not None: - if prop.name not in d: - # create new dict - d[prop.name] = {} - # insert value in the dict - d[prop.name][prop.key_name] = prop_value - else: - # make scalar - d[prop.name] = prop_value - - # parse blobs - for blob in self.blobs: - blob_value = _parse_blob_value(blob) - if blob.key_name is not None: - if blob.name not in d: - # create new dict - d[blob.name] = {} - # insert value in the dict - d[blob.name][blob.key_name] = blob_value - else: - # make scalar - d[blob.name] = blob_value - - return d - - -class ArtifactTag(BASE, ArtifactBase): - __tablename__ = 'glare_artifact_tags' - __table_args__ = (Index('ix_glare_artifact_tags_artifact_id_tag_value', - 'artifact_id', 'value'), - Index('ix_glare_artifact_tags_artifact_id', - 'artifact_id'), - {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'},) - - id = Column(String(36), primary_key=True, nullable=False, - default=lambda: uuidutils.generate_uuid()) - artifact_id = Column(String(36), ForeignKey('glare_artifacts.id'), - nullable=False) - artifact = relationship(Artifact, - backref=backref('tags', - cascade="all, delete-orphan")) - value = Column(String(255), nullable=False) - - -class ArtifactProperty(BASE, ArtifactBase): - __tablename__ = 'glare_artifact_properties' - __table_args__ = ( - Index('ix_glare_artifact_properties_artifact_id', 'artifact_id'), - Index('ix_glare_artifact_properties_name', 'name'), - {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'},) - id = Column(String(36), primary_key=True, nullable=False, - default=lambda: uuidutils.generate_uuid()) - artifact_id = Column(String(36), ForeignKey('glare_artifacts.id'), - nullable=False) - artifact = relationship(Artifact, - backref=backref('properties', - cascade="all, delete-orphan")) - name = Column(String(255), nullable=False) - string_value = Column(String(20000)) - int_value = Column(Integer) - numeric_value = Column(Numeric) - bool_value = Column(Boolean) - position = Column(Integer) - key_name = Column(String(255)) - - -class ArtifactBlob(BASE, ArtifactBase): - __tablename__ = 'glare_artifact_blobs' - __table_args__ = ( - Index('ix_glare_artifact_blobs_artifact_id', 'artifact_id'), - Index('ix_glare_artifact_blobs_name', 'name'), - {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'},) - id = Column(String(36), primary_key=True, nullable=False, - default=lambda: uuidutils.generate_uuid()) - artifact_id = Column(String(36), ForeignKey('glare_artifacts.id'), - nullable=False) - name = Column(String(255), nullable=False) - size = Column(BigInteger().with_variant(Integer, "sqlite")) - md5 = Column(String(32)) - sha1 = Column(String(40)) - sha256 = Column(String(64)) - external = Column(Boolean) - url = Column(Text) - status = Column(String(32), nullable=False) - key_name = Column(String(2048)) - content_type = Column(String(255)) - artifact = relationship(Artifact, - backref=backref('blobs', - cascade="all, delete-orphan")) - - -class ArtifactLock(BASE, ArtifactBase): - __tablename__ = 'glare_artifact_locks' - __table_args__ = ( - {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'},) - id = Column(String(255), primary_key=True, nullable=False) - acquired_at = Column( - DateTime, nullable=False, default=lambda: timeutils.utcnow()) - - -class ArtifactBlobData(BASE, ArtifactBase): - __tablename__ = 'glare_blob_data' - __table_args__ = ( - {'mysql_engine': 'InnoDB', 
'mysql_charset': 'utf8'},) - id = Column(String(255), primary_key=True, nullable=False) - data = Column(LargeBinary(length=(2 ** 32) - 1), nullable=False) - - -def register_models(engine): - """Create database tables for all models with the given engine.""" - models = (Artifact, ArtifactTag, ArtifactProperty, ArtifactBlob, - ArtifactLock) - for model in models: - model.metadata.create_all(engine) - - -def unregister_models(engine): - """Drop database tables for all models with the given engine.""" - models = (ArtifactLock, ArtifactBlob, ArtifactProperty, ArtifactTag, - Artifact) - for model in models: - model.metadata.drop_all(engine) diff --git a/glare/engine.py b/glare/engine.py deleted file mode 100644 index 19b920e..0000000 --- a/glare/engine.py +++ /dev/null @@ -1,578 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from copy import deepcopy -import os - -import jsonpatch -from oslo_config import cfg -from oslo_log import log as logging -from oslo_utils import excutils -from oslo_utils import timeutils -from oslo_utils import uuidutils - -from glare.common import exception -from glare.common import policy -from glare.common import store_api -from glare.common import utils -from glare.db import artifact_api -from glare.i18n import _ -from glare import locking -from glare.notification import Notifier -from glare.objects.meta import registry - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - - -class Engine(object): - """Engine is responsible for executing different helper operations when - processing incoming requests from Glare API. - - Engine receives incoming data and does the following: - - check basic policy permissions; - - requests artifact definition from artifact type registry; - - check access permission(ro, rw); - - lock artifact for update if needed; - - pass data to base artifact to execute all business logic operations - with database; - - notify other users about finished operation. - - Engine should not include any business logic and validation related - to Artifacts. Engine should not know any internal details of artifact - type, because this part of the work is done by Base artifact type. 
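    For example (an illustrative summary of the methods below), a create
    request flows as: policy check -> registry lookup -> scoped lock ->
    base artifact create -> notification.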
- """ - def __init__(self): - # register all artifact types - registry.ArtifactRegistry.register_all_artifacts() - - # generate all schemas - self.schemas = {} - for name, type_list in registry.ArtifactRegistry.obj_classes().items(): - type_name = type_list[0].get_type_name() - self.schemas[type_name] = registry.ArtifactRegistry.\ - get_artifact_type(type_name).gen_schemas() - - lock_engine = locking.LockEngine(artifact_api.ArtifactLockApi()) - - def _create_scoped_lock(self, context, type_name, name, version, - owner, visibility='private'): - """Create scoped lock for artifact.""" - # validate that artifact doesn't exist for the scope - filters = [('name', 'eq:' + name), ('version', 'eq:' + version)] - if visibility == 'public': - filters.extend([('visibility', 'public')]) - elif visibility == 'private': - filters.extend([('owner', 'eq:' + owner), - ('visibility', 'private')]) - - scope_id = "%s:%s:%s" % (type_name, name, version) - if visibility != 'public': - scope_id += ':%s' % owner - lock = self.lock_engine.acquire(context, scope_id) - - try: - if len(self.list(context, type_name, filters)) > 0: - msg = _("Artifact with this name and version is already " - "exists for this scope.") - raise exception.Conflict(msg) - except Exception: - with excutils.save_and_reraise_exception(logger=LOG): - self.lock_engine.release(lock) - - return lock - - @staticmethod - def _show_artifact(ctx, type_name, artifact_id, read_only=False): - """Return artifact requested by user. - - Check access permissions and policies. - - :param ctx: user context - :param type_name: artifact type name - :param artifact_id: id of the artifact to be updated - :param read_only: flag, if set to True only read access is checked, - if False then engine checks if artifact can be modified by the user - """ - artifact_type = registry.ArtifactRegistry.get_artifact_type(type_name) - # only artifact is available for class users - af = artifact_type.show(ctx, artifact_id) - if not read_only: - if not ctx.is_admin and ctx.tenant != af.owner or ctx.read_only: - raise exception.Forbidden() - LOG.debug("Artifact %s acquired for read-write access", - artifact_id) - else: - LOG.debug("Artifact %s acquired for read-only access", artifact_id) - - return af - - def show_type_schemas(self, context, type_name=None): - policy.authorize("artifact:type_list", {}, context) - if type_name is None: - return self.schemas - if type_name not in self.schemas: - msg = _("Artifact type %s does not exist") % type_name - raise exception.NotFound(message=msg) - return self.schemas[type_name] - - def _apply_patch(self, context, af, patch): - # This function is a collection of hacks and workarounds to make - # json patch apply changes to oslo_vo object. - action_names = {'artifact:update'} - af_dict = af.to_dict() - try: - for operation in patch._ops: - # apply the change to make sure that it's correct - af_dict = operation.apply(af_dict) - - # format of location is "/key/value" or just "/key" - # first case symbolizes that we have dict or list insertion, - # second, that we work with a field itself. - items = operation.location.split('/', 2) - field_name = items[1] - if af.is_blob(field_name) or af.is_blob_dict(field_name): - msg = _("Cannot add blob with this request. 
" - "Use special Blob API for that.") - raise exception.BadRequest(msg) - if len(items) == 2 and operation.operation['op'] == 'remove': - msg = _("Cannot remove field '%s' from " - "artifact.") % field_name - raise exception.BadRequest(msg) - - # work with hooks and define action names - if field_name == 'visibility': - utils.validate_visibility_transition( - af, - from_visibility=af.visibility, - to_visibility=af_dict['visibility'] - ) - if af_dict['visibility'] == 'public': - af.validate_publish(context, af) - action_names.add('artifact:publish') - elif field_name == 'status': - utils.validate_status_transition( - af, from_status=af.status, to_status=af_dict['status']) - if af_dict['status'] == 'deactivated': - action_names.add('artifact:deactivate') - elif af_dict['status'] == 'active': - if af.status == 'deactivated': - action_names.add('artifact:reactivate') - else: - af.validate_activate(context, af) - action_names.add('artifact:activate') - else: - utils.validate_change_allowed(af, field_name) - - old_val = getattr(af, field_name) - setattr(af, field_name, af_dict[field_name]) - new_val = getattr(af, field_name) - if new_val == old_val: - # No need to save value to db if it's not changed - af.obj_reset_changes([field_name]) - - except (jsonpatch.JsonPatchException, - jsonpatch.JsonPointerException, TypeError) as e: - raise exception.BadRequest(message=str(e)) - - return action_names - - def create(self, context, type_name, values): - """Create artifact record in Glare. - - :param context: user context - :param type_name: artifact type name - :param values: dict with artifact fields - :return: dict representation of created artifact - """ - action_name = "artifact:create" - policy.authorize(action_name, values, context) - artifact_type = registry.ArtifactRegistry.get_artifact_type(type_name) - version = values.get('version', artifact_type.DEFAULT_ARTIFACT_VERSION) - init_values = { - 'id': uuidutils.generate_uuid(), - 'name': values.pop('name'), - 'version': version, - 'owner': context.tenant, - 'created_at': timeutils.utcnow(), - 'updated_at': timeutils.utcnow() - } - af = artifact_type.init_artifact(context, init_values) - # acquire scoped lock and execute artifact create - with self._create_scoped_lock(context, type_name, af.name, - af.version, context.tenant): - for field_name, value in values.items(): - if af.is_blob(field_name) or af.is_blob_dict(field_name): - msg = _("Cannot add blob with this request. " - "Use special Blob API for that.") - raise exception.BadRequest(msg) - utils.validate_change_allowed(af, field_name) - setattr(af, field_name, value) - af = af.create(context) - # notify about new artifact - Notifier.notify(context, action_name, af) - # return artifact to the user - return af.to_dict() - - def save(self, context, type_name, artifact_id, patch): - """Update artifact with json patch. - - Apply patch to artifact and validate artifact before updating it - in database. If there is request for visibility or status change - then call specific method for that. 
- - :param context: user context - :param type_name: name of artifact type - :param artifact_id: id of the artifact to be updated - :param patch: json patch object - :return: dict representation of updated artifact - """ - lock_key = "%s:%s" % (type_name, artifact_id) - with self.lock_engine.acquire(context, lock_key): - af = self._show_artifact(context, type_name, artifact_id) - af.obj_reset_changes() - action_names = self._apply_patch(context, af, patch) - updates = af.obj_changes_to_primitive() - - LOG.debug("Update diff successfully calculated for artifact " - "%(af)s %(diff)s", {'af': artifact_id, 'diff': updates}) - if not updates: - return af.to_dict() - - for action_name in action_names: - policy.authorize(action_name, af.to_dict(), context) - - if any(i in updates for i in ('name', 'version', 'visibility')): - # changing an artifact's scope requires setting a lock first - with self._create_scoped_lock( - context, type_name, updates.get('name', af.name), - updates.get('version', af.version), af.owner, - updates.get('visibility', af.visibility)): - modified_af = af.save(context) - else: - modified_af = af.save(context) - - for action_name in action_names: - Notifier.notify(context, action_name, modified_af) - return modified_af.to_dict() - - def show(self, context, type_name, artifact_id): - """Show detailed artifact info. - - :param context: user context - :param type_name: Artifact type name - :param artifact_id: id of artifact to show - :return: definition of requested artifact - """ - policy.authorize("artifact:get", {}, context) - af = self._show_artifact(context, type_name, artifact_id, - read_only=True) - return af.to_dict() - - @staticmethod - def list(context, type_name, filters, marker=None, limit=None, - sort=None, latest=False): - """Return list of artifacts requested by user. - - :param context: user context - :param type_name: Artifact type name - :param filters: filters that need to be applied to artifact - :param marker: the artifact considered the beginning of the list, - so all artifacts before the marker (including the marker itself) - will not be added to the artifact list - :param limit: maximum number of items in list - :param sort: sorting options - :param latest: flag indicating that only artifacts with the highest - versions should be returned in the output - :return: list of artifact definitions - """ - policy.authorize("artifact:list", {}, context) - artifact_type = registry.ArtifactRegistry.get_artifact_type(type_name) - # return list to the user - af_list = [af.to_dict() - for af in artifact_type.list(context, filters, marker, - limit, sort, latest)] - return af_list - - @staticmethod - def _delete_blobs(context, af, blobs): - for name, blob in blobs.items(): - if af.is_blob(name): - if not blob['external']: - try: - store_api.delete_blob(blob['url'], context=context) - except exception.NotFound: - # data has already been removed - pass - af.db_api.update_blob(context, af.id, {name: None}) - elif af.is_blob_dict(name): - upd_blob = deepcopy(blob) - for key, val in blob.items(): - if not val['external']: - try: - store_api.delete_blob(val['url'], context=context) - except exception.NotFound: - pass - del upd_blob[key] - af.db_api.update_blob(context, af.id, {name: upd_blob}) - - def delete(self, context, type_name, artifact_id): - """Delete artifact from Glare. 
- - :param context: User context - :param type_name: Artifact type name - :param artifact_id: id of artifact to delete - """ - af = self._show_artifact(context, type_name, artifact_id) - action_name = 'artifact:delete' - policy.authorize(action_name, af.to_dict(), context) - af.validate_delete(context, af) - blobs = af.delete(context, af) - - delayed_delete = getattr(CONF, type_name).delayed_delete - # use global parameter if delayed delete isn't set per artifact type - if delayed_delete is None: - delayed_delete = CONF.delayed_delete - - if not delayed_delete: - if blobs: - # delete blobs one by one - self._delete_blobs(context, af, blobs) - LOG.info("Blobs successfully deleted for artifact %s", af.id) - # delete artifact itself - af.db_api.delete(context, af.id) - Notifier.notify(context, action_name, af) - - @staticmethod - def _get_blob_info(af, field_name, blob_key=None): - """Return requested blob info""" - if blob_key: - if not af.is_blob_dict(field_name): - msg = _("%s is not a blob dict") % field_name - raise exception.BadRequest(msg) - return getattr(af, field_name).get(blob_key) - else: - if not af.is_blob(field_name): - msg = _("%s is not a blob") % field_name - raise exception.BadRequest(msg) - return getattr(af, field_name, None) - - @staticmethod - def _save_blob_info(context, af, field_name, blob_key, value): - """Save blob instance in database.""" - if blob_key is not None: - # Insert blob value in the folder - folder = getattr(af, field_name) - folder[blob_key] = value - value = folder - return af.update_blob(context, af.id, field_name, value) - - def add_blob_location(self, context, type_name, artifact_id, field_name, - location, blob_meta, blob_key=None): - """Add external location to blob. - - :param context: user context - :param type_name: name of artifact type - :param artifact_id: id of the artifact to be updated - :param field_name: name of blob or blob dict field - :param location: external blob url - :param blob_meta: dictionary containing blob metadata like md5 checksum - :param blob_key: if field_name is blob dict it specifies key - in this dict - :return: dict representation of updated artifact - """ - blob_name = "%s[%s]" % (field_name, blob_key)\ - if blob_key else field_name - - blob = {'url': location, 'size': None, 'md5': blob_meta.get("md5"), - 'sha1': blob_meta.get("sha1"), 'id': uuidutils.generate_uuid(), - 'sha256': blob_meta.get("sha256"), 'status': 'active', - 'external': True, 'content_type': None} - - lock_key = "%s:%s" % (type_name, artifact_id) - with self.lock_engine.acquire(context, lock_key): - af = self._show_artifact(context, type_name, artifact_id) - action_name = 'artifact:set_location' - policy.authorize(action_name, af.to_dict(), context) - if self._get_blob_info(af, field_name, blob_key): - msg = _("Blob %(blob)s already exists for artifact " - "%(af)s") % {'blob': field_name, 'af': af.id} - raise exception.Conflict(message=msg) - utils.validate_change_allowed(af, field_name) - modified_af = self._save_blob_info( - context, af, field_name, blob_key, blob) - - LOG.info("External location %(location)s has been created " - "successfully for artifact %(artifact)s blob %(blob)s", - {'location': location, 'artifact': af.id, - 'blob': blob_name}) - - Notifier.notify(context, action_name, modified_af) - return modified_af.to_dict() - - def upload_blob(self, context, type_name, artifact_id, field_name, fd, - content_type, blob_key=None): - """Upload Artifact blob. 
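    An illustrative call (the type, field and variable names are assumed
    examples):

        engine.upload_blob(ctx, 'images', af_id, 'image', fd,
                           'application/octet-stream')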
- - :param context: user context - :param type_name: name of artifact type - :param artifact_id: id of the artifact to be updated - :param field_name: name of blob or blob dict field - :param fd: file descriptor that Glare uses to upload the file - :param content_type: data content-type - :param blob_key: if field_name is blob dict it specifies key - in this dictionary - :return: dict representation of updated artifact - """ - - blob_name = "%s[%s]" % (field_name, blob_key) \ - if blob_key else field_name - blob_id = uuidutils.generate_uuid() - - lock_key = "%s:%s" % (type_name, artifact_id) - with self.lock_engine.acquire(context, lock_key): - af = self._show_artifact(context, type_name, artifact_id) - action_name = "artifact:upload" - policy.authorize(action_name, af.to_dict(), context) - - # create an empty blob instance in db with 'saving' status - if self._get_blob_info(af, field_name, blob_key): - msg = _("Blob %(blob)s already exists for artifact " - "%(af)s") % {'blob': field_name, 'af': af.id} - raise exception.Conflict(message=msg) - utils.validate_change_allowed(af, field_name) - blob = {'url': None, 'size': None, 'md5': None, 'sha1': None, - 'sha256': None, 'id': blob_id, 'status': 'saving', - 'external': False, 'content_type': content_type} - - modified_af = self._save_blob_info( - context, af, field_name, blob_key, blob) - - LOG.debug("Parameters validation for artifact %(artifact)s blob " - "upload passed for blob %(blob_name)s. " - "Start blob uploading to backend.", - {'artifact': af.id, 'blob_name': blob_name}) - - # try to perform blob uploading to storage - path = None - try: - try: - # call upload hook first - fd, path = af.validate_upload(context, af, field_name, fd) - except Exception as e: - raise exception.BadRequest(message=str(e)) - - max_allowed_size = af.get_max_blob_size(field_name) - # Check if we want to upload to a folder (and not just to a Blob) - if blob_key is not None: - blobs_dict = getattr(af, field_name) - overall_folder_size = sum( - blob["size"] for blob in blobs_dict.values() - if blob["size"] is not None) - max_folder_size_allowed = af.get_max_folder_size(field_name) \ - overall_folder_size # always non-negative - max_allowed_size = min(max_allowed_size, - max_folder_size_allowed) - - default_store = af.get_default_store( - context, af, field_name, blob_key) - location_uri, size, checksums = store_api.save_blob_to_store( - blob_id, fd, context, max_allowed_size, - store_type=default_store) - except Exception: - # if upload failed remove blob from db and storage - with excutils.save_and_reraise_exception(logger=LOG): - if blob_key is None: - af.update_blob(context, af.id, field_name, None) - else: - blob_dict_attr = getattr(modified_af, field_name) - del blob_dict_attr[blob_key] - af.update_blob(context, af.id, field_name, blob_dict_attr) - finally: - if path: - os.remove(path) - - LOG.info("Successfully finished blob uploading for artifact " - "%(artifact)s blob field %(blob)s.", - {'artifact': af.id, 'blob': blob_name}) - - # update blob info and activate it - blob.update({'url': location_uri, - 'status': 'active', - 'size': size}) - blob.update(checksums) - - with self.lock_engine.acquire(context, lock_key): - af = af.show(context, artifact_id) - modified_af = self._save_blob_info( - context, af, field_name, blob_key, blob) - - Notifier.notify(context, action_name, modified_af) - return modified_af.to_dict() - - def download_blob(self, context, type_name, artifact_id, field_name, - blob_key=None): - """Download binary data from Glare 
Artifact. - - :param context: user context - :param type_name: name of artifact type - :param artifact_id: id of the artifact to be updated - :param field_name: name of blob or blob dict field - :param blob_key: if field_name is blob dict it specifies key - in this dict - :return: file iterator for requested file - """ - af = self._show_artifact(context, type_name, artifact_id, - read_only=True) - policy.authorize("artifact:download", af.to_dict(), context) - - blob_name = "%s[%s]" % (field_name, blob_key)\ - if blob_key else field_name - - if af.status == 'deleted': - msg = _("Cannot download data when artifact is deleted") - raise exception.Forbidden(message=msg) - - blob = self._get_blob_info(af, field_name, blob_key) - if blob is None: - msg = _("No data found for blob %s") % blob_name - raise exception.NotFound(message=msg) - if blob['status'] != 'active': - msg = _("%s is not ready for download") % blob_name - raise exception.Conflict(message=msg) - - meta = {'md5': blob.get('md5'), - 'sha1': blob.get('sha1'), - 'sha256': blob.get('sha256'), - 'external': blob.get('external')} - if blob['external']: - data = {'url': blob['url']} - else: - data = store_api.load_from_store(uri=blob['url'], context=context) - meta['size'] = blob.get('size') - meta['content_type'] = blob.get('content_type') - - path = None - try: - # call download hook in the end - data, path = af.validate_download( - context, af, field_name, data) - except Exception as e: - raise exception.BadRequest(message=str(e)) - finally: - if path: - os.remove(path) - - return data, meta diff --git a/glare/hacking/__init__.py b/glare/hacking/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare/hacking/checks.py b/glare/hacking/checks.py deleted file mode 100644 index 38b3f70..0000000 --- a/glare/hacking/checks.py +++ /dev/null @@ -1,156 +0,0 @@ -# Copyright (c) 2014 OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import re - -""" -Guidelines for writing new hacking checks - - - Use only for Glare-specific tests. OpenStack general tests - should be submitted to the common 'hacking' module. - - Pick numbers in the range G3xx. Find the current test with - the highest allocated number and then pick the next value. - If nova has an N3xx code for that test, use the same number. - - Keep the test method code in the source file ordered based - on the G3xx value. 
- - List the new rule in the top level HACKING.rst file - - Add test cases for each new rule to glare/tests/test_hacking.py - -""" - - -asse_trueinst_re = re.compile( - r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, " - "(\w|\.|\'|\"|\[|\])+\)\)") -asse_equal_type_re = re.compile( - r"(.)*assertEqual\(type\((\w|\.|\'|\"|\[|\])+\), " - "(\w|\.|\'|\"|\[|\])+\)") -asse_equal_end_with_none_re = re.compile( - r"(.)*assertEqual\((\w|\.|\'|\"|\[|\])+, None\)") -asse_equal_start_with_none_re = re.compile( - r"(.)*assertEqual\(None, (\w|\.|\'|\"|\[|\])+\)") -unicode_func_re = re.compile(r"(\s|\W|^)unicode\(") - -_all_log_levels = {'debug', 'error', 'info', 'warning', - 'critical', 'exception'} -# Since _Lx have been removed, we just need to check _() -translated_logs = re.compile( - r"(.)*LOG\.(%(level)s)\(\s*_\(" % {'level': '|'.join(_all_log_levels)}) - -dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)") - - -def assert_true_instance(logical_line): - """Check for assertTrue(isinstance(a, b)) sentences - - G316 - """ - if asse_trueinst_re.match(logical_line): - yield (0, "G316: assertTrue(isinstance(a, b)) sentences not allowed") - - -def assert_equal_type(logical_line): - """Check for assertEqual(type(A), B) sentences - - G317 - """ - if asse_equal_type_re.match(logical_line): - yield (0, "G317: assertEqual(type(A), B) sentences not allowed") - - -def assert_equal_none(logical_line): - """Check for assertEqual(A, None) or assertEqual(None, A) sentences - - G318 - """ - res = (asse_equal_start_with_none_re.match(logical_line) or - asse_equal_end_with_none_re.match(logical_line)) - if res: - yield (0, "G318: assertEqual(A, None) or assertEqual(None, A) " - "sentences not allowed") - - -def no_translate_logs(logical_line): - """Check for use of LOG.*(_( - - G319 - """ - if translated_logs.match(logical_line): - yield (0, "G319: Don't translate logs") - - -def no_direct_use_of_unicode_function(logical_line): - """Check for use of unicode() builtin - - G320 - """ - if unicode_func_re.match(logical_line): - yield(0, "G320: Use six.text_type() instead of unicode()") - - -def check_no_contextlib_nested(logical_line): - msg = ("G327: contextlib.nested is deprecated since Python 2.7. See " - "https://docs.python.org/2/library/contextlib.html#contextlib." - "nested for more information.") - if ("with contextlib.nested(" in logical_line or - "with nested(" in logical_line): - yield(0, msg) - - -def dict_constructor_with_list_copy(logical_line): - msg = ("G328: Must use a dict comprehension instead of a dict constructor " - "with a sequence of key-value pairs.") - if dict_constructor_with_list_copy_re.match(logical_line): - yield (0, msg) - - -def check_python3_xrange(logical_line): - if re.search(r"\bxrange\s*\(", logical_line): - yield(0, "G329: Do not use xrange. 
Use range, or six.moves.range for " - "large loops.") - - -def check_python3_no_iteritems(logical_line): - msg = ("G330: Use six.iteritems() or dict.items() instead of " - "dict.iteritems().") - if re.search(r".*\.iteritems\(\)", logical_line): - yield(0, msg) - - -def check_python3_no_iterkeys(logical_line): - msg = ("G331: Use six.iterkeys() or dict.keys() instead of " - "dict.iterkeys().") - if re.search(r".*\.iterkeys\(\)", logical_line): - yield(0, msg) - - -def check_python3_no_itervalues(logical_line): - msg = ("G332: Use six.itervalues() or dict.values() instead of " - "dict.itervalues().") - if re.search(r".*\.itervalues\(\)", logical_line): - yield(0, msg) - - -def factory(register): - register(assert_true_instance) - register(assert_equal_type) - register(assert_equal_none) - register(no_translate_logs) - register(no_direct_use_of_unicode_function) - register(check_no_contextlib_nested) - register(dict_constructor_with_list_copy) - register(check_python3_xrange) - register(check_python3_no_iteritems) - register(check_python3_no_iterkeys) - register(check_python3_no_itervalues) diff --git a/glare/i18n.py b/glare/i18n.py deleted file mode 100644 index e61c9bb..0000000 --- a/glare/i18n.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2014 Red Hat, Inc. -# All Rights Reserved. - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_i18n import * # noqa - -_translators = TranslatorFactory(domain='glare') - -# The primary translation function using the well-known name "_" -_ = _translators.primary diff --git a/glare/locking.py b/glare/locking.py deleted file mode 100644 index e32f0d2..0000000 --- a/glare/locking.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from oslo_log import log as logging - -LOG = logging.getLogger(__name__) - - -class LockApiBase(object): - """Lock Api Base class that is responsible for acquiring/releasing locks.""" - - def create_lock(self, context, lock_key): - """Acquire lock for current user. - - :param context: user context - :param lock_key: unique lock identifier that defines lock scope - :return: lock internal identifier - """ - raise NotImplementedError() - - def delete_lock(self, context, lock_id): - """Delete acquired user lock. - - :param context: user context - :param lock_id: lock internal identifier - """ - raise NotImplementedError() - - -class Lock(object): - """Object that stores lock context for users. 
This class is internal - and used only in lock engine, so users shouldn't use this class directly. - """ - - def __init__(self, context, lock_id, lock_key, release_method): - """Initialize lock context.""" - self.context = context - self.lock_id = lock_id - self.lock_key = lock_key - self.release = release_method - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - # TODO(kairat) catch all exceptions here - self.release(self) - - -class LockEngine(object): - """Glare lock engine. - - Defines how artifact updates must be synchronized with each other. When - some user obtains a lock for the same artifact then other user cannot - request that lock and gets a Conflict error. - """ - # NOTE(kairat): Lock Engine also allows to encapsulate lock logic in one - # place so we can potentially add tooz functionality in future to Glare. - # Right now there are troubles with locks in Galera (especially in mysql) - # and zookeeper requires additional work from IT engineers. So we need - # support production ready DB locks in our implementation. - - MAX_LOCK_LENGTH = 255 - - def __init__(self, lock_api): - """Initialize lock engine with some lock api. - - :param lock_api: api that allows to create/delete locks - """ - # NOTE(kairat): lock_api is db_api now but it might be - # replaced with DLM in near future. - self.lock_api = lock_api - - def acquire(self, context, lock_key): - """Acquire lock for artifact. - - If there is some other lock with the same key then - raise Conflict Error. - - :param context: user context - :param lock_key: lock key - :return: lock definition - """ - if lock_key is not None and len(lock_key) < self.MAX_LOCK_LENGTH: - lock_id = self.lock_api.create_lock(context, lock_key) - LOG.info("Lock %(lock_id)s acquired for lock_key %(lock_key)s", - {'lock_id': lock_id, 'lock_key': lock_key}) - else: - lock_id = None - LOG.info("No lock for lock_key %s", lock_key) - - return Lock(context, lock_id, lock_key, self.release) - - def release(self, lock): - """Release lock for artifact. - - :param lock: Lock object - """ - if lock.lock_id is not None: - self.lock_api.delete_lock(lock.context, lock.lock_id) - LOG.info("Lock %(lock_id)s released for lock_key %(key)s", - {'lock_id': lock.lock_id, 'key': lock.lock_key}) diff --git a/glare/notification.py b/glare/notification.py deleted file mode 100644 index ccb2d08..0000000 --- a/glare/notification.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
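The Lock object above implements the context-manager protocol, so the intended calling pattern is a with-statement around the artifact update. A minimal sketch of that pattern, using an invented in-memory stand-in for the database lock API (InMemoryLockApi is illustrative only and is not part of Glare):

import itertools

from glare import locking


class InMemoryLockApi(locking.LockApiBase):
    """Toy lock API that keeps locks in a dict instead of the database."""

    def __init__(self):
        self._held = {}
        self._ids = itertools.count(1)

    def create_lock(self, context, lock_key):
        if lock_key in self._held.values():
            raise RuntimeError('Conflict: %s is already locked' % lock_key)
        lock_id = next(self._ids)
        self._held[lock_id] = lock_key
        return lock_id

    def delete_lock(self, context, lock_id):
        self._held.pop(lock_id, None)


engine = locking.LockEngine(InMemoryLockApi())
# Lock defines __enter__/__exit__, so it is released on block exit.
with engine.acquire(None, 'sample_artifact:my_art:1.0.0'):
    pass  # update the artifact while the lock is held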
- -from oslo_config import cfg -from oslo_log import log as logging -import oslo_messaging - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - -notifier_opts = [ - cfg.HostAddressOpt('glare_publisher_id', default="artifact.localhost", - help='Default publisher_id for outgoing ' - 'Glare notifications.')] -CONF.register_opts(notifier_opts) - - -def get_transport(): - return oslo_messaging.get_notification_transport(CONF) - - -def set_defaults(control_exchange='glare'): - oslo_messaging.set_transport_defaults(control_exchange) - - -class Notifier(object): - """Simple interface to receive Glare notifier.""" - - SERVICE_NAME = 'artifact' - GLARE_NOTIFIER = None - - @classmethod - def _get_notifier(cls): - if cls.GLARE_NOTIFIER is None: - cls.GLARE_NOTIFIER = oslo_messaging.Notifier( - get_transport(), - publisher_id=CONF.glare_publisher_id) - return cls.GLARE_NOTIFIER - - @classmethod - def notify(cls, context, event_type, body, level='INFO'): - """Notify Glare listeners with some useful info. - - :param context: User request context - :param event_type: type of event - :param body: notification payload - :param level: notification level ("INFO", "WARN", "ERROR", etc) - """ - af_notifier = cls._get_notifier() - method = getattr(af_notifier, level.lower()) - method({}, "%s.%s" % (cls.SERVICE_NAME, event_type), - body.to_notification()) - LOG.debug('Notification event %(event)s send successfully for ' - 'request %(request)s', {'event': event_type, - 'request': context.request_id}) diff --git a/glare/objects/__init__.py b/glare/objects/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare/objects/all.py b/glare/objects/all.py deleted file mode 100644 index 0ad3465..0000000 --- a/glare/objects/all.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_versionedobjects import fields - -from glare.common import exception -from glare.objects import base -from glare.objects.meta import wrappers - - -Field = wrappers.Field.init - - -class All(base.BaseArtifact): - """Artifact type that allows to get artifacts regardless of their type""" - - fields = { - 'type_name': Field(fields.StringField, - description="Name of artifact type."), - } - - @classmethod - def create(cls, context): - raise exception.Forbidden("This type is read only.") - - def save(self, context): - raise exception.Forbidden("This type is read only.") - - @classmethod - def delete(cls, context, af): - raise exception.Forbidden("This type is read only.") - - @classmethod - def update_blob(cls, context, af_id, field_name, values): - raise exception.Forbidden("This type is read only.") - - @classmethod - def get_type_name(cls): - return "all" diff --git a/glare/objects/base.py b/glare/objects/base.py deleted file mode 100644 index 09b6288..0000000 --- a/glare/objects/base.py +++ /dev/null @@ -1,609 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. 
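The notifier above only requires that the payload expose a to_notification() method and that the context carry a request_id; oslo.messaging does the rest. A rough usage sketch, assuming a working transport_url is configured for oslo.messaging (FakeArtifact and FakeContext are invented stand-ins, and the id is a made-up UUID):

from glare import notification


class FakeArtifact(object):
    """Invented payload; real artifacts inherit to_notification()."""

    def to_notification(self):
        return {'type': 'sample_artifact',
                'id': 'b4efd93c-0f5a-4a3e-92b7-0f3c2b1d9e77',
                'status': 'active'}


class FakeContext(object):
    request_id = 'req-0a1b2c3d'


notification.set_defaults()  # publish on the 'glare' control exchange
notification.Notifier.notify(FakeContext(), 'activate', FakeArtifact())
notification.Notifier.notify(FakeContext(), 'delete', FakeArtifact(), 'WARN')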
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg -from oslo_log import log as logging -from oslo_versionedobjects import base -from oslo_versionedobjects import fields - -from glare.common import exception -from glare.common import utils -from glare.db import artifact_api -from glare.i18n import _ -from glare.objects.meta import fields as glare_fields -from glare.objects.meta import validators -from glare.objects.meta import wrappers - -global_artifact_opts = [ - cfg.BoolOpt('delayed_delete', default=False, - help=_("If False defines that artifacts must be deleted " - "immediately after the user call. Otherwise they just " - "will be marked as deleted so they can be scrubbed " - "by some other tool in the background.")), -] - -CONF = cfg.CONF -CONF.register_opts(global_artifact_opts) - -LOG = logging.getLogger(__name__) - - -class BaseArtifact(base.VersionedObject): - """BaseArtifact is a central place in Glare. It executes Glare business - logic operations and checks, such as: - 1) Check if the artifact satisfies all requirements and can be activated - 2) Check that the artifact is not deactivated before downloading blobs - ... - BaseArtifact interacts with the database and saves/requests artifact info - through the specified database API. BaseArtifact is an abstract class, so - all concrete classes must inherit from it. Concrete classes - must define custom fields in addition to the BaseArtifact fields, and the - db_api that must be used for interaction with the database. 
- """ - - OBJ_PROJECT_NAMESPACE = 'glare' - - DEFAULT_ARTIFACT_VERSION = '0.0.0' - - STATUS = ('drafted', 'active', 'deactivated', 'deleted') - - Field = wrappers.Field.init - DictField = wrappers.DictField.init - ListField = wrappers.ListField.init - Blob = wrappers.BlobField.init - - fields = { - 'id': Field(fields.StringField, system=True, - validators=[validators.UUID()], nullable=False, - sortable=True, description="Artifact UUID."), - 'name': Field(fields.StringField, required_on_activate=False, - nullable=False, sortable=True, - validators=[validators.MinStrLen(1)], - description="Artifact Name."), - 'owner': Field(fields.StringField, system=True, - required_on_activate=False, nullable=False, - sortable=True, description="ID of user/tenant who " - "uploaded artifact."), - 'status': Field(fields.StringField, default='drafted', - nullable=False, sortable=True, mutable=True, - validators=[validators.AllowedValues(STATUS)], - description="Artifact status."), - 'created_at': Field(fields.DateTimeField, system=True, - nullable=False, sortable=True, - description="Datetime when artifact has " - "been created."), - 'updated_at': Field(fields.DateTimeField, system=True, - nullable=False, sortable=True, mutable=True, - description="Datetime when artifact has " - "been updated last time."), - 'activated_at': Field(fields.DateTimeField, system=True, - required_on_activate=False, sortable=True, - description="Datetime when artifact has became " - "active."), - 'description': Field(fields.StringField, mutable=True, - required_on_activate=False, default="", - validators=[validators.MaxStrLen(4096)], - filter_ops=[], - description="Artifact description."), - 'tags': ListField(fields.String, mutable=True, - required_on_activate=False, - # tags are filtered without any operators - filter_ops=[], - validators=[validators.Unique(convert_to_set=True)], - element_validators=[ - validators.ForbiddenChars([',', '/']), - validators.MinStrLen(1) - ], - description="List of tags added to Artifact."), - 'metadata': DictField(fields.String, required_on_activate=False, - element_validators=[validators.MinStrLen(1)], - description="Key-value dict with useful " - "information about an artifact."), - 'visibility': Field(fields.StringField, default='private', - nullable=False, sortable=True, mutable=True, - validators=[validators.AllowedValues( - ['private', 'public'])], - description="Artifact visibility that defines " - "if artifact can be available to " - "other users."), - 'version': Field(glare_fields.VersionField, required_on_activate=False, - default=DEFAULT_ARTIFACT_VERSION, nullable=False, - sortable=True, validators=[validators.Version()], - description="Artifact version(semver).") - } - - artifact_type_opts = [ - cfg.BoolOpt('delayed_delete', - help=_( - "If False defines that artifacts must be deleted " - "immediately after the user call. Otherwise they just " - "will be marked as deleted so they can be scrubbed " - "by some other tool in the background. " - "Redefines global parameter of the same name " - "from [DEFAULT] section.")), - ] - - def __new__(cls, *args, **kwargs): - CONF.register_opts(cls.artifact_type_opts, group=cls.get_type_name()) - return base.VersionedObject.__new__(cls) - - @classmethod - def list_artifact_type_opts(cls): - return cls.artifact_type_opts - - db_api = artifact_api.ArtifactAPI() - - @classmethod - def is_blob(cls, field_name): - """Helper to check that a field is a blob. 
- - :param field_name: name of the field - :return: True if the field is a blob, False otherwise - """ - return isinstance(cls.fields.get(field_name), glare_fields.BlobField) - - @classmethod - def is_blob_dict(cls, field_name): - """Helper to check that field is a blob dict. - - :param field_name: name of the field - :return: True if the field is a blob dict, False otherwise - """ - return (isinstance(cls.fields.get(field_name), glare_fields.Dict) and - cls.fields[field_name].element_type == - glare_fields.BlobFieldType) - - @classmethod - def init_artifact(cls, context, values): - """Initialize an empty versioned object with values. - - Initialize vo object with default values and values specified by user. - Also reset all changes of initialized object so user can track own - changes. - - :param context: user context - :param values: values needs to be set - :return: artifact with initialized values - """ - af = cls(context) - # setup default values for all non specified fields - default_fields = [] - for field in af.fields: - if field not in values: - default_fields.append(field) - if default_fields: - af.obj_set_defaults(*default_fields) - - # apply values specified by user - for name, value in values.items(): - setattr(af, name, value) - return af - - @classmethod - def get_type_name(cls): - """Return type name that allows to find artifact type in Glare - - Type name allows to find artifact type definition in Glare registry. - - :return: string that identifies current artifact type - """ - raise NotImplementedError() - - def create(self, context): - """Create new artifact in Glare repo. - - :param context: user context - :return: created artifact object - """ - values = self.obj_changes_to_primitive() - values['type_name'] = self.get_type_name() - af_vals = self.db_api.save(context, None, values) - return self.init_artifact(context, af_vals) - - def save(self, context): - """Save artifact in Glare repo. - - :param context: user context - :return: updated artifact object - """ - updated_af = self.db_api.save(context, self.id, - self.obj_changes_to_primitive()) - return self.init_artifact(context, updated_af) - - @classmethod - def show(cls, context, artifact_id): - """Return Artifact from Glare repo - - :param context: user context - :param artifact_id: id of requested artifact - :return: requested artifact object - """ - af = cls.db_api.get(context, artifact_id) - return cls.init_artifact(context, af) - - @classmethod - def _get_field_type(cls, obj): - """Get string representation of field type for filters.""" - if isinstance(obj, fields.IntegerField) or obj is fields.Integer: - return 'int' - elif isinstance(obj, fields.FloatField) or obj is fields.Float: - return 'numeric' - elif isinstance(obj, fields.FlexibleBooleanField) or \ - obj is fields.FlexibleBoolean: - return 'bool' - return 'string' - - @classmethod - def _parse_sort_values(cls, sort): - """Prepare sorting parameters for database.""" - new_sort = [] - for key, direction in sort: - if key not in cls.fields: - msg = _("The field %s doesn't exist.") % key - raise exception.BadRequest(msg) - # check if field can be sorted - if not cls.fields[key].sortable: - msg = _("The field %s is not sortable.") % key - raise exception.BadRequest(msg) - new_sort.append((key, direction, cls._get_field_type( - cls.fields.get(key)))) - return new_sort - - @classmethod - def _validate_filter_ops(cls, filter_name, op): - field = cls.fields.get(filter_name) - if op not in field.filter_ops: - msg = (_("Unsupported filter type '%(key)s'." 
- "The following filters are supported " - "%(filters)s") % { - 'key': op, 'filters': str(field.filter_ops)}) - raise exception.BadRequest(message=msg) - - @classmethod - def _parse_filter_values(cls, filters): - # input format for filters is list of tuples: - # (filter_name, filter_value) - # output format for filters is list of tuples: - # (field_name, key_name, op, field_type, value) - new_filters = [] - - for filter_name, filter_value in filters: - if filter_name in ('tags-any', 'tags'): - if ':' in filter_value: - msg = _("Tags are filtered without operator") - raise exception.BadRequest(msg) - new_filters.append( - (filter_name, None, None, None, filter_value)) - continue - - key_name = None - if '.' in filter_name: - filter_name, key_name = filter_name.rsplit('.', 1) - if not isinstance(cls.fields.get(filter_name), - glare_fields.Dict): - msg = _("Field %s is not Dict") % filter_name - raise exception.BadRequest(msg) - - if cls.fields.get(filter_name) is None: - msg = _("Unable filter '%s'") % filter_name - raise exception.BadRequest(msg) - - field_type = cls.fields.get(filter_name) - if isinstance(field_type, glare_fields.List) or isinstance( - field_type, glare_fields.Dict) and key_name is not None: - field_type = field_type.element_type - - try: - op, val = utils.split_filter_op(filter_value) - if isinstance(field_type, glare_fields.Dict): - if op not in ['eq', 'in']: - msg = (_("Unsupported filter type '%s'. The following " - "filters are supported: eq, in") % op) - raise exception.BadRequest(message=msg) - if op == 'in': - new_filters.append(( - filter_name, utils.split_filter_value_for_quotes( - val), op, None, None)) - else: - new_filters.append(( - filter_name, val, op, None, None)) - else: - cls._validate_filter_ops(filter_name, op) - if op == 'in': - value = [field_type.coerce(cls(), filter_name, value) - for value in - utils.split_filter_value_for_quotes(val)] - else: - value = field_type.coerce(cls(), filter_name, val) - new_filters.append( - (filter_name, key_name, op, - cls._get_field_type(field_type), value)) - except ValueError: - msg = _("Invalid filter value: %s") % str(val) - raise exception.BadRequest(msg) - - return new_filters - - @classmethod - def list(cls, context, filters=None, marker=None, limit=None, - sort=None, latest=False): - """Return list of artifacts requested by user. 
- - :param context: user context - :param filters: filters that need to be applied to artifact - :param marker: the artifact that considered as begin of the list - so all artifacts before marker (including marker itself) will not be - added to artifact list - :param limit: maximum number of items in the list - :param sort: sorting options - :param latest: flag that indicates, that only artifacts with highest - versions should be returned in output - :return: list of artifact objects - """ - - default_sort_parameters = ( - ('created_at', 'desc', None), ('id', 'asc', None)) - # Parse sort parameters and update them with defaults - sort = [] if sort is None else cls._parse_sort_values(sort) - for default_sort in default_sort_parameters: - for s in sort: - # If the default sort parameter already in the list - skip it - if s[0] == default_sort[0]: - break - else: - sort.append(default_sort) - - default_filter_parameters = [ - ('status', None, 'neq', None, 'deleted')] - if cls.get_type_name() != 'all': - default_filter_parameters.append( - ('type_name', None, 'eq', None, cls.get_type_name())) - # Parse filter parameters and update them with defaults - filters = [] if filters is None else cls._parse_filter_values(filters) - for default_filter in default_filter_parameters: - if default_filter not in filters: - filters.append(default_filter) - - return [cls.init_artifact(context, af) - for af in cls.db_api.list( - context, filters, marker, limit, sort, latest)] - - @classmethod - def delete(cls, context, af): - """Delete artifact and all its blobs from Glare. - - :param context: user context - :param af: artifact object targeted for deletion - """ - # marking artifact as deleted - cls.db_api.save(context, af.id, {'status': 'deleted'}) - - # collect all uploaded blobs - blobs = {} - for name in af.fields: - if cls.is_blob(name) or cls.is_blob_dict(name): - field = getattr(af, name) - if field: - blobs[name] = field - - LOG.debug("Marked artifact %(artifact)s as deleted.", - {'artifact': af.id}) - - return blobs - - @classmethod - def get_max_blob_size(cls, field_name): - """Get the maximum allowed blob size in bytes. - - :param field_name: blob or blob dict field name - :return: maximum blob size in bytes - """ - return getattr(cls.fields[field_name], 'max_blob_size') - - @classmethod - def get_max_folder_size(cls, field_name): - """Get the maximum allowed folder size in bytes. - - :param field_name: folder (blob dict) field name - :return: maximum folder size in bytes - """ - return getattr(cls.fields[field_name], 'max_folder_size') - - @classmethod - def update_blob(cls, context, af_id, field_name, values): - """Update blob info in database. 
- - :param context: user context - :param af_id: id of modified artifact - :param field_name: blob or blob dict field name - :param values: updated blob values - :return: updated artifact definition in Glare - """ - af_upd = cls.db_api.update_blob(context, af_id, {field_name: values}) - return cls.init_artifact(context, af_upd) - - @classmethod - def validate_activate(cls, context, af): - """Validation hook for activation.""" - pass - - @classmethod - def validate_upload(cls, context, af, field_name, fd): - """Validation hook for uploading.""" - return fd, None - - @classmethod - def validate_download(cls, context, af, field_name, fd): - """Validation hook for downloading.""" - return fd, None - - @classmethod - def validate_publish(cls, context, af): - """Validation hook for publishing.""" - pass - - @classmethod - def validate_delete(cls, context, af): - """Validation hook for deletion.""" - pass - - @classmethod - def get_default_store(cls, context=None, af=None, - field_name=None, blob_key=None): - """Return a default store type for artifact type.""" - for t in CONF.enabled_artifact_types: - type_name, __, store_name = t.partition(':') - if type_name == cls.get_type_name(): - return store_name - - def to_notification(self): - """Return notification body that can be send to listeners. - - :return: dict with notification information - """ - return { - 'type': self.get_type_name(), - 'id': self.id, - 'description': self.description, - 'name': self.name, - 'version': self.version, - 'visibility': self.visibility, - 'status': self.status, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'activated_at': self.activated_at, - 'owner': self.owner - } - - def to_dict(self): - """Convert oslo versioned object to dictionary. - - :return: dict with field names and field values - """ - return self.obj_to_primitive()['versioned_object.data'] - - def obj_changes_to_primitive(self): - changes = self.obj_get_changes() - res = {} - for key, val in changes.items(): - if val is not None and hasattr(val, 'to_primitive'): - res[key] = val.to_primitive() - else: - res[key] = val - return res - - @classmethod - def _schema_field(cls, field, field_name=''): - field_type = utils.get_schema_type(field) - schema = {} - - # generate schema for validators - for val in getattr(field, 'validators', []): - schema.update(val.to_jsonschema()) - - schema['type'] = (field_type - if not field.nullable else [field_type, 'null']) - schema['glareType'] = utils.get_glare_type(field) - output_blob_schema = { - 'type': ['object', 'null'], - 'properties': { - 'size': {'type': ['number', 'null']}, - 'md5': {'type': ['string', 'null']}, - 'sha1': {'type': ['string', 'null']}, - 'sha256': {'type': ['string', 'null']}, - 'external': {'type': 'boolean'}, - 'status': {'type': 'string', - 'enum': list( - glare_fields.BlobFieldType.BLOB_STATUS)}, - 'content_type': {'type': 'string'}, - }, - 'required': ['size', 'md5', 'sha1', 'sha256', 'external', 'status', - 'content_type'], - 'additionalProperties': False - } - - if field.system: - schema['readOnly'] = True - - if isinstance(field, glare_fields.Dict): - element_type = utils.get_schema_type(field.element_type) - property_validators = schema.pop('propertyValidators', []) - if field.element_type is glare_fields.BlobFieldType: - schema['additionalProperties'] = output_blob_schema - else: - if schema.get('properties'): - properties = {} - required = schema.pop('required', []) - for key in schema.pop('properties'): - properties[key] = { - 'type': (element_type - if key in 
required - else [element_type, 'null'])} - for val in property_validators: - properties[key].update(val) - schema['properties'] = properties - schema['additionalProperties'] = False - else: - schema['additionalProperties'] = {'type': element_type} - for val in property_validators: - schema['additionalProperties'].update(val) - - if isinstance(field, glare_fields.List): - items_validators = schema.pop('itemValidators', []) - schema['items'] = { - 'type': utils.get_schema_type(field.element_type)} - for val in items_validators: - schema['items'].update(val) - - if isinstance(field, glare_fields.BlobField): - schema.update(output_blob_schema) - - if isinstance(field, fields.DateTimeField): - schema['format'] = 'date-time' - - if field_name == 'status': - schema['enum'] = cls.STATUS - - if field.description: - schema['description'] = field.description - if field.mutable: - schema['mutable'] = True - if field.sortable: - schema['sortable'] = True - if not field.required_on_activate: - schema['required_on_activate'] = False - if field._default is not None: - schema['default'] = field._default - - schema['filter_ops'] = field.filter_ops - - return schema - - @classmethod - def gen_schemas(cls): - """Return json schema representation of the artifact type.""" - schemas_prop = {} - for field_name, field in cls.fields.items(): - schemas_prop[field_name] = cls._schema_field( - field, field_name=field_name) - schemas = {'properties': schemas_prop, - 'name': cls.get_type_name(), - 'version': cls.VERSION, - 'title': 'Artifact type %s of version %s' % - (cls.get_type_name(), cls.VERSION), - 'type': 'object', - 'required': ['name']} - - return schemas diff --git a/glare/objects/heat_environment.py b/glare/objects/heat_environment.py deleted file mode 100644 index 4d0bd9c..0000000 --- a/glare/objects/heat_environment.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -from glare.objects import base -from glare.objects.meta import wrappers - -Blob = wrappers.BlobField.init - - -class HeatEnvironment(base.BaseArtifact): - - fields = { - 'environment': Blob(description="Heat Environment text body."), - } - - @classmethod - def get_type_name(cls): - return "heat_environments" diff --git a/glare/objects/heat_template.py b/glare/objects/heat_template.py deleted file mode 100644 index df32dac..0000000 --- a/glare/objects/heat_template.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_versionedobjects import fields - -from glare.objects import base -from glare.objects.meta import fields as glare_fields -from glare.objects.meta import wrappers - -Field = wrappers.Field.init -Blob = wrappers.BlobField.init -Dict = wrappers.DictField.init -Folder = wrappers.FolderField.init - - -class HeatTemplate(base.BaseArtifact): - - fields = { - 'environments': Dict(glare_fields.LinkFieldType, - mutable=True, - description="References to Heat Environments " - "that can be used with current " - "template."), - 'template': Blob(description="Heat template body."), - 'nested_templates': Folder(description="Dict of nested templates " - "where key is the name of " - "template and value is " - "nested template body."), - 'default_envs': Dict(fields.String, mutable=True, - description="Default environments that can be " - "applied to the template if no " - "environments specified by user.") - } - - @classmethod - def get_type_name(cls): - return "heat_templates" diff --git a/glare/objects/image.py b/glare/objects/image.py deleted file mode 100644 index c7728cd..0000000 --- a/glare/objects/image.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
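Both Heat types above follow the same declarative recipe: subclass BaseArtifact, describe the payload with wrapper fields, and implement get_type_name(). A hypothetical custom type built the same way might look as follows (bash_scripts is an invented name, not a type shipped with Glare); to load it, its module would be listed in custom_artifact_types_modules and its type name added to enabled_artifact_types:

from oslo_versionedobjects import fields

from glare.objects import base
from glare.objects.meta import wrappers

Field = wrappers.Field.init
Blob = wrappers.BlobField.init


class BashScript(base.BaseArtifact):

    fields = {
        'script': Blob(description="Bash script body."),
        'interpreter': Field(fields.StringField,
                             required_on_activate=False,
                             default='/bin/bash',
                             description="Interpreter used to run "
                                         "the script."),
    }

    @classmethod
    def get_type_name(cls):
        return "bash_scripts"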
- - -from oslo_versionedobjects import fields - -from glare.objects import base -from glare.objects.meta import validators -from glare.objects.meta import wrappers - -Field = wrappers.Field.init -Blob = wrappers.BlobField.init - - -class Image(base.BaseArtifact): - - fields = { - 'container_format': Field(fields.StringField, - validators=[validators.AllowedValues( - ['ami', 'ari', 'aki', 'bare', - 'ovf', 'ova', 'docker'])], - description="Image container format."), - 'disk_format': Field(fields.StringField, - validators=[validators.AllowedValues( - ['ami', 'ari', 'aki', 'vhd', 'vhdx', - 'vmdk', 'raw', 'qcow2', 'vdi', 'iso'])], - description="Image disk format."), - 'min_ram': Field(fields.IntegerField, required_on_activate=False, - validators=[validators.MinNumberSize(0)], - description="Minimal RAM required to boot image."), - 'min_disk': Field(fields.IntegerField, required_on_activate=False, - validators=[validators.MinNumberSize(0)], - description="Minimal disk space " - "required to boot image."), - 'image': Blob(max_blob_size=1073741824000, - required_on_activate=False, - description="Image binary."), - 'kernel_id': Field(fields.StringField, - required_on_activate=False, - validators=[validators.UUID()], - description="ID of image stored in Glare that " - "should be used as the kernel when " - "booting an AMI-style image."), - 'ramdisk_id': Field(fields.StringField, - required_on_activate=False, - validators=[validators.UUID()], - description="ID of image stored in Glare that " - "should be used as the ramdisk when " - "booting an AMI-style image."), - 'instance_uuid': Field(fields.StringField, - required_on_activate=False, - description="Metadata which can be used to " - "record which instance this image " - "is associated with. " - "(Informational only, does not " - "create an instance snapshot.)"), - 'architecture': Field(fields.StringField, - required_on_activate=False, - description="Operating system architecture as " - "specified in http://docs.openstack." - "org/trunk/openstack-compute/admin/" - "content/adding-images.html"), - 'os_distro': Field(fields.StringField, - required_on_activate=False, - description="Common name of operating system " - "distribution as specified in " - "http://docs.openstack.org/trunk/" - "openstack-compute/admin/content/" - "adding-images.html"), - 'os_version': Field(fields.StringField, - required_on_activate=False, - description="Operating system version as " - "specified by the distributor"), - } - - @classmethod - def get_type_name(cls): - return "images" diff --git a/glare/objects/meta/__init__.py b/glare/objects/meta/__init__.py deleted file mode 100644 index 8a30ca1..0000000 --- a/glare/objects/meta/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2017 Nokia -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
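The validators attached to the Image fields above run on assignment rather than at save time, because the field wrappers chain them into coerce() (see wrappers.py below). A rough sketch of the effect, assuming a deployed Glare with the images type registered and a valid request context in hand (context below):

from glare.objects.image import Image

af = Image.init_artifact(context, {'name': 'cirros'})
af.container_format = 'bare'   # accepted by AllowedValues
af.min_ram = 512               # accepted by MinNumberSize(0)

try:
    af.container_format = 'tarball'   # not in the allowed list
except Exception:
    # AllowedValues raises ValueError; the coerce wrapper in wrappers.py
    # intercepts it and re-raises it for the API layer.
    pass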
- -from glare.objects.meta import wrappers -# for backward compatibility when 'wrappers' module was called 'attribute' -attribute = wrappers diff --git a/glare/objects/meta/fields.py b/glare/objects/meta/fields.py deleted file mode 100644 index bd333e0..0000000 --- a/glare/objects/meta/fields.py +++ /dev/null @@ -1,184 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import jsonschema -from jsonschema import exceptions as json_exceptions -from oslo_versionedobjects import fields -import semantic_version -import six -import six.moves.urllib.parse as urlparse -import six.moves.urllib.request as urlrequest - -from glare.common import exception -from glare.i18n import _ - - -class Version(fields.FieldType): - - @staticmethod - def coerce(obj, field, value): - return str(semantic_version.Version.coerce(str(value))) - - -class VersionField(fields.AutoTypedField): - AUTO_TYPE = Version() - - -class BlobFieldType(fields.FieldType): - """Blob field contains reference to blob location. - """ - BLOB_STATUS = (SAVING, ACTIVE) = ('saving', 'active') - - BLOB_SCHEMA = { - 'type': 'object', - 'properties': { - 'url': {'type': ['string', 'null'], 'format': 'uri', - 'maxLength': 2048}, - 'size': {'type': ['number', 'null']}, - 'md5': {'type': ['string', 'null']}, - 'sha1': {'type': ['string', 'null']}, - 'sha256': {'type': ['string', 'null']}, - 'external': {'type': 'boolean'}, - 'id': {'type': 'string'}, - 'status': {'type': 'string', - 'enum': list(BLOB_STATUS)}, - 'content_type': {'type': ['string', 'null']}, - }, - 'required': ['url', 'size', 'md5', 'sha1', 'sha256', 'external', - 'status', 'id', 'content_type'] - } - - @staticmethod - def coerce(obj, field, value): - """Validate and store blob info inside oslo.vo""" - if not isinstance(value, dict): - raise ValueError(_("Blob value must be dict. Got %s type instead") - % type(value)) - try: - jsonschema.validate(value, BlobFieldType.BLOB_SCHEMA) - except json_exceptions.ValidationError as e: - raise ValueError(e) - - return value - - @staticmethod - def to_primitive(obj, field, value): - prim = {key: val for key, val in value.items() - if key != 'id'} - - if not value.get('external'): - url = '/artifacts/%(name)s/%(id)s/' % { - "name": obj.get_type_name(), - 'id': obj.id - } - blob_path = field.split('[') - url = url + blob_path[0] - if len(blob_path) > 1: - url += '/%s' % blob_path[1][1:-2] - prim['url'] = url - return prim - - -class BlobField(fields.AutoTypedField): - AUTO_TYPE = BlobFieldType() - - -class LinkFieldType(fields.FieldType): - """Link field specifies Artifact dependency on other artifact or some - external resource. From technical perspective it is just soft link to Glare - Artifact or https/http resource. So Artifact users can download the - referenced file by that link. 
- """ - - @staticmethod - def is_external(link): - return link.startswith('http') - - @staticmethod - def get_type_name(link): - url = link.split('/') - if len(url) == 4: - return url[2] - else: - raise ValueError(_("It is not possible to " - "extract type_name from link %s"), link) - - @staticmethod - def coerce(obj, field, value): - # to remove the existing link user sets its value to None, - # we have to consider this case. - if value is None: - return value - # check that value is string - if not isinstance(value, six.string_types): - raise ValueError(_('A string is required in field %(field)s, ' - 'not a %(type)s') % - {'field': field, 'type': type(value).__name__}) - # determine if link is external or internal - external = LinkFieldType.is_external(value) - # validate link itself - if external: - link = urlparse.urlparse(value) - if link.scheme not in ('http', 'https'): - raise ValueError(_('Only http and https requests ' - 'are allowed in url %s') % value) - try: - with urlrequest.urlopen(value) as data: - data.read(1) - except Exception: - raise ValueError( - _('Link %(link)s is not valid in field ' - '%(field)s. The link must be either valid url or ' - 'reference to artifact. Example: ' - 'http://glarehost:9494/artifacts//' - '' - ) % {'link': value, 'field': field}) - else: - result = value.split('/') - if len(result) != 4 or result[1] != 'artifacts': - raise ValueError( - _('Link %(link)s is not valid in field ' - '%(field)s. The link must be either valid url or ' - 'reference to artifact. Example: ' - '/artifacts//' - ) % {'link': value, 'field': field}) - # try to find the referenced artifact - try: - obj.db_api.get(obj.obj_context, result[3]) - except exception.NotFound: - raise ValueError( - _("Link %(link)s is not valid in field %(field)s, because " - "artifact with id %(art_id)s doesn't exist" - ) % {'link': value, 'field': field, 'art_id': result[3]} - ) - - return value - - -class Link(fields.AutoTypedField): - AUTO_TYPE = LinkFieldType() - - -class List(fields.AutoTypedField): - - def __init__(self, element_type, **kwargs): - self.AUTO_TYPE = fields.List(element_type()) - super(List, self).__init__(**kwargs) - - -class Dict(fields.AutoTypedField): - - def __init__(self, element_type, **kwargs): - self.AUTO_TYPE = fields.Dict(element_type()) - super(Dict, self).__init__(**kwargs) diff --git a/glare/objects/meta/file_utils.py b/glare/objects/meta/file_utils.py deleted file mode 100644 index 9d2633d..0000000 --- a/glare/objects/meta/file_utils.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright 2017 Nokia -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Contains additional file utils that may be useful for upload hooks.""" - -import io -import os -import tempfile -import zipfile - -from oslo_config import cfg -from oslo_log import log as logging -from oslo_utils import excutils -from oslo_utils import uuidutils - -from glare.common import store_api -from glare.common import utils -from glare.objects.meta import fields as glare_fields - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - - -def create_temporary_file(stream, suffix=''): - """Create a temporary local file from a stream. - - :param stream: stream of bytes to be stored in a temporary file - :param suffix: (optional) file name suffix - """ - tfd, path = tempfile.mkstemp(suffix=suffix) - while True: - data = stream.read(100000) - if data == b'': # end of file reached - break - os.write(tfd, data) - tfile = os.fdopen(tfd, "rb") - return tfile, path - - -def extract_zip_to_temporary_folder(tfile): - """Create temporary folder and extract all file contents there. - - :param tfile: zip archive to be extracted - """ - zip_ref = zipfile.ZipFile(tfile, 'r') - tdir = tempfile.mkdtemp() - zip_ref.extractall(tdir) - zip_ref.close() - return tdir - - -def upload_content_file(context, af, data, blob_dict, key_name, - content_type='application/octet-stream'): - """Upload a file to a blob dictionary. - - :param context: user context - :param af: artifact object - :param data: bytes that need to be stored in the blob dictionary - :param blob_dict: name of the blob_dictionary field - :param key_name: name of key in the dictionary - :param content_type: (optional) specifies mime type of uploading data - """ - blob_id = uuidutils.generate_uuid() - # create an an empty blob instance in db with 'saving' status - blob = {'url': None, 'size': None, 'md5': None, 'sha1': None, - 'sha256': None, 'status': glare_fields.BlobFieldType.SAVING, - 'external': False, 'content_type': content_type, 'id': blob_id} - - getattr(af, blob_dict)[key_name] = blob - af = af.update_blob(context, af.id, blob_dict, getattr(af, blob_dict)) - - # try to perform blob uploading to storage backend - try: - default_store = af.get_default_store(context, af, blob_dict, key_name) - location_uri, size, checksums = store_api.save_blob_to_store( - blob_id, data, context, af.get_max_blob_size(blob_dict), - default_store) - except Exception: - # if upload failed remove blob from db and storage - with excutils.save_and_reraise_exception(logger=LOG): - del getattr(af, blob_dict)[key_name] - af = af.update_blob(context, af.id, - blob_dict, getattr(af, blob_dict)) - # update blob info and activate it - blob.update({'url': location_uri, - 'status': glare_fields.BlobFieldType.ACTIVE, - 'size': size}) - blob.update(checksums) - getattr(af, blob_dict)[key_name] = blob - af.update_blob(context, af.id, blob_dict, getattr(af, blob_dict)) - - -def unpack_zip_archive_in_memory(context, af, field_name, fd): - """Unpack zip archive in memory and write its content to artifact folder. - - :param context: user context - :param af: artifact object - :param field_name: blob dict name where to unpack the data - :param fd: zip archive - :return io.BytesIO object - simple stream of in-memory bytes - """ - # Warning: usage of this function is potentially harmful, because it - # doesn't limit how much data it writes to ram. Careless usage in artifact - # types may cause denial of the service. 
- # Thus it should be used only with blobs with reduced max_blob_size - - flobj = io.BytesIO() - while True: - data = fd.read(65536) - if data == b'': # end of file reached - break - flobj.write(data) - - zip_ref = zipfile.ZipFile(flobj, 'r') - for name in zip_ref.namelist(): - if not name.endswith('/'): - upload_content_file( - context, af, utils.BlobIterator(zip_ref.read(name)), - field_name, name) - flobj.seek(0) - return flobj diff --git a/glare/objects/meta/registry.py b/glare/objects/meta/registry.py deleted file mode 100644 index 4f5add5..0000000 --- a/glare/objects/meta/registry.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import collections -import importlib -import pkgutil -import sys - -from oslo_config import cfg -from oslo_config import types as conf_types -from oslo_log import log as logging -from oslo_versionedobjects import base as vo_base - -from glare.common import exception -from glare.i18n import _ -from glare.objects import base - -CONF = cfg.CONF - -LOG = logging.getLogger(__name__) - -registry_options = [ - cfg.ListOpt('enabled_artifact_types', - default=['heat_templates', 'heat_environments', - 'murano_packages', 'tosca_templates', 'images'], - item_type=conf_types.String(), - help=_("List of enabled artifact types that will be " - "available to users")), - cfg.ListOpt('custom_artifact_types_modules', default=[], - item_type=conf_types.String(), - help=_("List of custom user modules with artifact types that " - "will be uploaded by Glare dynamically during service " - "startup.")) -] -CONF.register_opts(registry_options) - - -def import_submodules(module): - """Import all submodules of a module. - - :param module: Package name - :return: list of imported modules - """ - package = sys.modules[module] - return [ - importlib.import_module(module + '.' + name) - for loader, name, is_pkg in pkgutil.walk_packages(package.__path__)] - - -def import_modules_list(modules): - custom_module_list = [] - for module_name in modules: - try: - custom_module_list.append(importlib.import_module(module_name)) - except Exception as e: - LOG.exception(e) - LOG.error("Cannot import custom artifact type from module " - "%(module_name)s. Error: %(error)s", - {'module_name': module_name, 'error': str(e)}) - return custom_module_list - - -def get_subclasses(module, base_class): - subclasses = [] - for name in dir(module): - obj = getattr(module, name) - try: - if issubclass(obj, base_class) and obj != base_class: - subclasses.append(obj) - except TypeError: - pass - return subclasses - - -class ArtifactRegistry(vo_base.VersionedObjectRegistry): - """Artifact Registry is responsible for registration of artifacts and - returning appropriate artifact types based on artifact type name. 
- """ - - @classmethod - def register_all_artifacts(cls): - """Register all artifacts in Glare.""" - # get all submodules in glare.objects - # please note that we registering trusted modules first - # and applying custom modules after that to allow custom modules - # to specify custom logic inside - modules = (import_submodules('glare.objects') + - import_modules_list( - CONF.custom_artifact_types_modules)) - # get all versioned object classes in module - supported_types = [] - for module in modules: - supported_types.extend(get_subclasses(module, base.BaseArtifact)) - types = [t.partition(':')[0] for t in CONF.enabled_artifact_types] - for type_name in set(types + ['all']): - for af_type in supported_types: - if type_name == af_type.get_type_name(): - cls.register(af_type) - break - else: - raise exception.TypeNotFound(name=type_name) - - @classmethod - def get_artifact_type(cls, type_name): - """Return artifact type based on artifact type name. - - :param type_name: name of artifact type - :return: artifact class - """ - for name, af_type in cls.obj_classes().items(): - if af_type[0].get_type_name() == type_name: - return af_type[0] - raise exception.TypeNotFound(name=type_name) - - @classmethod - def reset_registry(cls): - """Resets all registered artifact type classes.""" - cls._registry._obj_classes = collections.defaultdict(list) diff --git a/glare/objects/meta/validators.py b/glare/objects/meta/validators.py deleted file mode 100644 index 6df736f..0000000 --- a/glare/objects/meta/validators.py +++ /dev/null @@ -1,397 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc -import uuid - -from oslo_log import log as logging -from oslo_versionedobjects import fields -import six - -from glare.common import exception -from glare.i18n import _ -from glare.objects.meta import fields as glare_fields - -LOG = logging.getLogger(__name__) - - -@six.add_metaclass(abc.ABCMeta) -class Validator(object): - """Common interface for all validators.""" - - @staticmethod - @abc.abstractmethod - def get_allowed_types(): - raise NotImplementedError() - - def check_type_allowed(self, field_type): - if not issubclass(field_type, self.get_allowed_types()): - # try to check if field_type is correct - # in case of element_type passed - allowed_field_types = tuple(type(field.AUTO_TYPE) - for field in self.get_allowed_types() - if hasattr(field, 'AUTO_TYPE')) - if not issubclass(field_type, allowed_field_types): - raise exception.IncorrectArtifactType( - _("%(type)s is not allowed for validator " - "%(val)s. 
Allowed types are %(allowed)s.") % { - "type": str(field_type), - "val": str(self.__class__), - "allowed": str(self.get_allowed_types())}) - - def to_jsonschema(self): - return {} - - @abc.abstractmethod - def __call__(self, value): - raise NotImplementedError() - - -class UUID(Validator): - - @staticmethod - def get_allowed_types(): - return fields.StringField, - - def __call__(self, value): - uuid.UUID(value, version=4) - - def to_jsonschema(self): - return {'pattern': ('^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F])' - '{4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$')} - - -class AllowedValues(Validator): - - def __init__(self, allowed_values): - self.allowed_values = allowed_values - - @staticmethod - def get_allowed_types(): - return fields.StringField, fields.IntegerField, fields.FloatField - - def __call__(self, value): - if value not in self.allowed_values: - raise ValueError(_("Value must be one of the following: %s") % - ', '.join(map(str, self.allowed_values))) - - def to_jsonschema(self): - return {'enum': self.allowed_values} - - -class Version(Validator): - - @staticmethod - def get_allowed_types(): - return glare_fields.VersionField, - - def __call__(self, value): - pass - - def to_jsonschema(self): - return {'pattern': ('/^([0-9]+)\.([0-9]+)\.([0-9]+)(?:-([0-9A-Za-z-]' - '+(?:\.[0-9A-Za-z-]+)*))?(?:\+[0-9A-Za-z-]+)?$/')} - - -@six.add_metaclass(abc.ABCMeta) -class SizeValidator(Validator): - - def __init__(self, size): - self.size = size - - -class MaxStrLen(SizeValidator): - - @staticmethod - def get_allowed_types(): - return fields.StringField, - - def __call__(self, value): - l = len(value) - if l > self.size: - raise ValueError( - _("String length must be less than %(size)d. " - "Current length: %(cur)d") % {'size': self.size, - 'cur': l}) - - def to_jsonschema(self): - return {'maxLength': self.size} - - -class MinStrLen(SizeValidator): - - @staticmethod - def get_allowed_types(): - return fields.StringField, - - def __call__(self, value): - l = len(value) - if l < self.size: - raise ValueError( - _("String length must not be less than %(size)d. " - "Current length: %(cur)d") % {'size': self.size, - 'cur': l}) - - def to_jsonschema(self): - return {'minLength': self.size} - - -class ForbiddenChars(Validator): - - def __init__(self, forbidden_chars): - self.forbidden_chars = forbidden_chars - - @staticmethod - def get_allowed_types(): - return fields.StringField, - - def __call__(self, value): - for fc in self.forbidden_chars: - if fc in value: - raise ValueError( - _("Forbidden character %(char)c found in string " - "%(string)s") - % {"char": fc, "string": value}) - - def to_jsonschema(self): - return {'pattern': '^[^%s]+$' % ''.join(self.forbidden_chars)} - - -@six.add_metaclass(abc.ABCMeta) -class MaxSize(SizeValidator): - - def __call__(self, value): - l = len(value) - if l > self.size: - raise ValueError( - _("Number of items must be less than " - "%(size)d. Current size: %(cur)d") % - {'size': self.size, 'cur': l}) - - -class MaxDictSize(MaxSize): - - @staticmethod - def get_allowed_types(): - return glare_fields.Dict, - - def to_jsonschema(self): - return {'maxProperties': self.size} - - -class MaxListSize(MaxSize): - - @staticmethod - def get_allowed_types(): - return glare_fields.List, - - def to_jsonschema(self): - return {'maxItems': self.size} - - -@six.add_metaclass(abc.ABCMeta) -class MinSize(SizeValidator): - - def __call__(self, value): - l = len(value) - if l < self.size: - raise ValueError( - _("Number of items must be greater than " - "%(size)d. 
Current size: %(cur)d") % - {'size': self.size, 'cur': l}) - - -class MinDictSize(MinSize): - - @staticmethod - def get_allowed_types(): - return glare_fields.Dict, - - def to_jsonschema(self): - return {'minProperties': self.size} - - -class MinListSize(MinSize): - - @staticmethod - def get_allowed_types(): - return glare_fields.List, - - def to_jsonschema(self): - return {'minItems': self.size} - - -class MaxNumberSize(SizeValidator): - - def __call__(self, value): - if value > self.size: - raise ValueError("Number is too big: %d. Max allowed number is " - "%d" % (value, self.size)) - - @staticmethod - def get_allowed_types(): - return fields.IntegerField, fields.FloatField - - def to_jsonschema(self): - return {'maximum': self.size} - - -class MinNumberSize(SizeValidator): - - def __call__(self, value): - if value < self.size: - raise ValueError("Number is too small: %d. Min allowed number is " - "%d" % (value, self.size)) - - @staticmethod - def get_allowed_types(): - return fields.IntegerField, fields.FloatField - - def to_jsonschema(self): - return {'minimum': self.size} - - -class Unique(Validator): - - def __init__(self, convert_to_set=False): - self.convert_to_set = convert_to_set - - @staticmethod - def get_allowed_types(): - return glare_fields.List, - - def __call__(self, value): - if self.convert_to_set: - value[:] = list(set(value)) - elif len(value) != len(set(value)): - raise ValueError(_("List items %s must be unique.") % value) - - def to_jsonschema(self): - return {'uniqueItems': True} - - -class AllowedDictKeys(Validator): - - def __init__(self, allowed_keys): - self.allowed_items = allowed_keys - - @staticmethod - def get_allowed_types(): - return glare_fields.Dict, - - def __call__(self, value): - for item in value: - if item not in self.allowed_items: - raise ValueError(_("Key %(item)s is not allowed in dict. 
" - "Allowed key values: %(allowed)s") % - {"item": item, - "allowed": ', '.join(self.allowed_items)}) - - def to_jsonschema(self): - return { - 'properties': {prop: {} for prop in self.allowed_items}, - } - - -class RequiredDictKeys(Validator): - - def __init__(self, required_keys): - self.required_items = required_keys - - @staticmethod - def get_allowed_types(): - return glare_fields.Dict, - - def __call__(self, value): - for item in self.required_items: - if item not in value: - raise ValueError(_("Key \"%(item)s\" is required in " - "dictionary %(value)s.") % - {"item": item, - "value": ''.join( - '{}:{}, '.format(key, val) - for key, val in value.items())}) - - def to_jsonschema(self): - return {'required': list(self.required_items)} - - -class MaxDictKeyLen(SizeValidator): - - @staticmethod - def get_allowed_types(): - return glare_fields.Dict, - - def __call__(self, value): - for key in value: - if len(str(key)) > self.size: - raise ValueError(_("Dict key length %(key)s must be less than " - "%(size)d.") % {'key': key, - 'size': self.size}) - - -class MinDictKeyLen(SizeValidator): - - @staticmethod - def get_allowed_types(): - return glare_fields.Dict, - - def __call__(self, value): - for key in value: - if len(str(key)) < self.size: - raise ValueError(_("Dict key length %(key)s must be bigger " - "than %(size)d.") % {'key': key, - 'size': self.size}) - - -@six.add_metaclass(abc.ABCMeta) -class ElementValidator(Validator): - - def __init__(self, validators): - self.validators = validators - - -class ListElementValidator(ElementValidator): - - @staticmethod - def get_allowed_types(): - return glare_fields.List, - - def __call__(self, value): - for v in value: - for validator in self.validators: - validator(v) - - def to_jsonschema(self): - return {'itemValidators': [ - val.to_jsonschema() for val in self.validators - ]} - - -class DictElementValidator(ElementValidator): - - @staticmethod - def get_allowed_types(): - return glare_fields.Dict, - - def __call__(self, value): - for v in value.values(): - for validator in self.validators: - validator(v) - - def to_jsonschema(self): - return {'propertyValidators': [ - val.to_jsonschema() for val in self.validators - ]} diff --git a/glare/objects/meta/wrappers.py b/glare/objects/meta/wrappers.py deleted file mode 100644 index 59463a2..0000000 --- a/glare/objects/meta/wrappers.py +++ /dev/null @@ -1,267 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""This file contains classes that wrap nat""" - -from oslo_versionedobjects import fields - -from glare.common import exception as exc -from glare.objects.meta import fields as glare_fields -from glare.objects.meta import validators as val_lib - -FILTERS = ( - FILTER_EQ, FILTER_NEQ, FILTER_IN, FILTER_GT, FILTER_GTE, FILTER_LT, - FILTER_LTE) = ('eq', 'neq', 'in', 'gt', 'gte', 'lt', 'lte') - -DEFAULT_MAX_BLOB_SIZE = 10485760 # 10 Megabytes -DEFAULT_MAX_FOLDER_SIZE = 2673868800 # 2550 Megabytes - - -class Field(object): - def __init__(self, field_class, mutable=False, required_on_activate=True, - system=False, validators=None, nullable=True, default=None, - sortable=False, filter_ops=None, description=""): - """Init and validate field. - Each artifact field has several common properties: - - :param required_on_activate: boolean value indicating if the field - value should be specified for the artifact - before activation (Default:True). - :param mutable: boolean value indicating if the field value may - be changed after the artifact is activated. (Default: False) - :param system: boolean value indicating if the field - value cannot be edited by user (Default: False). - :param sortable: boolean value indicating if there is a possibility - to sort by this fields's values. (Default: False) Only fields of - 4 primitive types may be sortable: integer, string, float and boolean. - :param default: a default value for the field may be specified - (Default: None). - :param validators: a list of objects. When user sets a value to - the field with additional validators Glare checks them before setting - the value and raises ValueError if at least one of the requirements - is not satisfied. - :param filter_ops: a list of available filter operators for the field. - There are seven available operators: - 'eq', 'neq', 'lt', 'lte', 'gt', 'gte', 'in'. - """ - - if not issubclass(field_class, fields.AutoTypedField): - raise exc.IncorrectArtifactType( - "Field class %s must be sub-class of AutoTypedField." % - field_class) - - self.validators = validators or [] - for v in self.validators: - v.check_type_allowed(field_class) - if isinstance(v, val_lib.MaxStrLen): - if v.size > 255 and sortable: - raise exc.IncorrectArtifactType( - "It's forbidden to make field %(field)s " - "sortable if string length can be more than 255 " - "symbols. Maximal allowed length now: %(max)d" % - {"field": str(field_class), 'max': v.size}) - - self.field_class = field_class - self.nullable = nullable - self.default = default - self.vo_props = ['nullable', 'default'] - - self.mutable = mutable - self.required_on_activate = required_on_activate - self.system = system - self.sortable = sortable - - try: - default_ops = self.get_allowed_filter_ops(self.element_type) - except AttributeError: - default_ops = self.get_allowed_filter_ops(field_class) - - if filter_ops is None: - self.filter_ops = default_ops - else: - for op in filter_ops: - if op not in default_ops: - raise exc.IncorrectArtifactType( - "Incorrect filter operator '%s'. 
" - "Only %s are allowed" % (op, ', '.join(default_ops))) - self.filter_ops = filter_ops - - self.field_props = ['mutable', 'required_on_activate', 'system', - 'sortable', 'filter_ops', 'description'] - self.description = description - - @staticmethod - def get_allowed_filter_ops(field): - if field in (fields.StringField, fields.String): - return [FILTER_EQ, FILTER_NEQ, FILTER_IN] - elif field in (fields.IntegerField, fields.Integer, fields.FloatField, - fields.Float, glare_fields.VersionField): - return FILTERS - elif field in (fields.FlexibleBooleanField, fields.FlexibleBoolean, - glare_fields.Link, glare_fields.LinkFieldType): - return [FILTER_EQ, FILTER_NEQ] - elif field in (glare_fields.BlobField, glare_fields.BlobFieldType): - return [] - elif field is fields.DateTimeField: - return [FILTER_LT, FILTER_GT] - - def get_default_validators(self): - default = [] - if issubclass(self.field_class, fields.StringField): - # check if fields is string - if not any(isinstance(v, val_lib.MaxStrLen) - for v in self.validators) and \ - not any(isinstance(v, val_lib.AllowedValues) - for v in self.validators): - default.append(val_lib.MaxStrLen(255)) - return default - - def get_field(self): - # init the field - vo_props = {prop_name: getattr(self, prop_name) - for prop_name in self.vo_props} - field = self.field_class(**vo_props) - # setup custom field properties - field_props = {prop_name: getattr(self, prop_name) - for prop_name in self.field_props} - for prop, value in field_props.items(): - setattr(field, prop, value) - - # apply custom validators - vals = self.validators - for def_val in self.get_default_validators(): - for val in self.validators: - if type(val) is type(def_val): - break - else: - vals.append(def_val) - - def wrapper(coerce_func): - def coerce_wrapper(obj, field, value): - try: - val = coerce_func(obj, field, value) - if val is not None: - for check_func in vals: - check_func(val) - return val - except (KeyError, ValueError, TypeError) as e: - msg = "Type: %s. Field: %s. 
Exception: %s" % ( - obj.get_type_name(), field, str(e)) - raise exc.BadRequest(message=msg) - return coerce_wrapper - - field.coerce = wrapper(field.coerce) - field.validators = vals - return field - - @classmethod - def init(cls, *args, **kwargs): - """Fabric to build fields.""" - return cls(*args, **kwargs).get_field() - - -class CompoundField(Field): - def __init__(self, field_class, element_type, element_validators=None, - **kwargs): - if element_type is None: - raise exc.IncorrectArtifactType("'element_type' must be set for " - "compound type.") - self.element_type = element_type - - super(CompoundField, self).__init__(field_class, **kwargs) - - self.vo_props.append('element_type') - self.field_props.append('element_type') - - self.element_validators = element_validators or [] - if self.sortable: - raise exc.IncorrectArtifactType("'sortable' must be False for " - "compound type.") - - def get_element_validators(self): - default_vals = [] - if issubclass(self.element_type, fields.String): - # check if fields is string - if not any(isinstance(v, val_lib.MaxStrLen) - for v in self.element_validators): - default_vals.append(val_lib.MaxStrLen(255)) - vals = default_vals + self.element_validators - for v in vals: - v.check_type_allowed(self.element_type) - return default_vals + self.element_validators - - -class ListField(CompoundField): - def __init__(self, element_type, max_size=255, **kwargs): - if 'default' not in kwargs: - kwargs['default'] = [] - if element_type is glare_fields.BlobField: - raise exc.IncorrectArtifactType("List of blobs is not allowed " - "to be specified in artifact.") - super(ListField, self).__init__(glare_fields.List, element_type, - **kwargs) - self.validators.append(val_lib.MaxListSize(max_size)) - - def get_default_validators(self): - default_vals = [] - elem_val = val_lib.ListElementValidator( - super(ListField, self).get_element_validators()) - default_vals.append(elem_val) - return default_vals - - -class DictField(CompoundField): - def __init__(self, element_type, max_size=255, **kwargs): - if 'default' not in kwargs: - kwargs['default'] = {} - super(DictField, self).__init__(glare_fields.Dict, element_type, - **kwargs) - self.validators.append(val_lib.MaxDictSize(max_size)) - - def get_default_validators(self): - default_vals = [] - elem_val = val_lib.DictElementValidator( - super(DictField, self).get_element_validators()) - default_vals.append(elem_val) - default_vals.append(val_lib.MaxDictKeyLen(255)) - default_vals.append(val_lib.MinDictKeyLen(1)) - return default_vals - - -class BlobField(Field): - def __init__(self, max_blob_size=DEFAULT_MAX_BLOB_SIZE, **kwargs): - super(BlobField, self).__init__( - field_class=glare_fields.BlobField, **kwargs) - self.max_blob_size = int(max_blob_size) - self.field_props.append('max_blob_size') - - -class FolderField(DictField): - def __init__(self, max_blob_size=DEFAULT_MAX_BLOB_SIZE, - max_folder_size=DEFAULT_MAX_FOLDER_SIZE, **kwargs): - super(FolderField, self).__init__( - element_type=glare_fields.BlobFieldType, **kwargs) - self.max_blob_size = int(max_blob_size) - self.max_folder_size = int(max_folder_size) - self.field_props.append('max_blob_size') - self.field_props.append('max_folder_size') - -# Classes below added for backward compatibility. 
They shouldn't be used - -Attribute = Field -CompoundAttribute = CompoundField -ListAttribute = ListField -DictAttribute = DictField -BlobAttribute = BlobField -BlobDictAttribute = FolderField diff --git a/glare/objects/murano_package.py b/glare/objects/murano_package.py deleted file mode 100644 index fb6c718..0000000 --- a/glare/objects/murano_package.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_versionedobjects import fields - -from glare.objects import base -from glare.objects.meta import fields as glare_fields -from glare.objects.meta import validators -from glare.objects.meta import wrappers - -Field = wrappers.Field.init -Blob = wrappers.BlobField.init -List = wrappers.ListField.init -Dict = wrappers.DictField.init - - -class MuranoPackage(base.BaseArtifact): - - fields = { - 'package': Blob(required_on_activate=False, - description="Murano Package binary.", - max_blob_size=104857600), - 'type': Field(fields.StringField, - validators=[validators.AllowedValues( - ['Application', 'Library'])], - default='Application', - description="Package type."), - 'display_name': Field(fields.StringField, mutable=True, - description="Package name in human-readable " - "format."), - 'categories': List(fields.String, mutable=True, - description="List of categories specified " - "for the package."), - 'class_definitions': List(fields.String, - validators=[validators.Unique()], - description="List of class definitions in " - "the package."), - 'inherits': Dict(fields.String), - 'keywords': List(fields.String, mutable=True), - 'dependencies': List(glare_fields.LinkFieldType, - required_on_activate=False, - description="List of package dependencies for " - "this package."), - } - - @classmethod - def get_type_name(cls): - return "murano_packages" diff --git a/glare/objects/secret.py b/glare/objects/secret.py deleted file mode 100644 index 921c763..0000000 --- a/glare/objects/secret.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2017 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
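secret.py, deleted next, follows the same pattern as murano_package.py above: every artifact type subclasses BaseArtifact, declares a fields dict through the wrapper factories, and returns a collection name from get_type_name(). For reference, a minimal type built on this API might look like the following (the 'Document' type is hypothetical, not part of the repo):

    from oslo_versionedobjects import fields

    from glare.objects import base
    from glare.objects.meta import wrappers

    Field = wrappers.Field.init
    Blob = wrappers.BlobField.init


    class Document(base.BaseArtifact):
        """Hypothetical artifact type storing one binary document."""

        fields = {
            'mime_type': Field(fields.StringField,
                               description="Document MIME type."),
            'data': Blob(description="Document body."),
        }

        @classmethod
        def get_type_name(cls):
            return "documents"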
-
-from glare.objects import base as base_artifact
-from glare.objects.meta import validators
-from glare.objects.meta import wrappers
-from oslo_versionedobjects import fields
-
-Field = wrappers.Field.init
-Blob = wrappers.BlobField.init
-Dict = wrappers.DictField.init
-Folder = wrappers.FolderField.init
-
-
-class Secret(base_artifact.BaseArtifact):
-    """The purpose of this Glare artifact, Secret, is to enable the user to
-    store 'secret' data such as private keys, certificates, passwords and
-    SSH keys.
-    """
-    VERSION = '1.0'
-
-    @classmethod
-    def get_type_name(cls):
-        return "secrets"
-
-    fields = {
-        'payload': Blob(  # the encrypted secret data
-            description="The secret's data to be stored"
-        ),
-
-        'payload_content_encoding': Field(
-            fields.StringField,
-            required_on_activate=False,
-            default="base64",
-            filter_ops=[],
-            validators=[validators.AllowedValues(["base64"])],
-            description="Required if the payload is encoded. "
-                        "The encoding used for the payload so that "
-                        "it can be included in the JSON request "
-                        "(only base64 is supported)."
-        ),

-        'secret_type': Field(
-            fields.StringField,
-            required_on_activate=False,
-            default="opaque",
-            sortable=True,
-            filter_ops=(wrappers.FILTER_EQ,),
-            validators=[validators.AllowedValues([
-                "symmetric", "public", "private",
-                "passphrase", "certificate", "opaque"])],
-            description="Used to indicate the type of secret being stored.",
-        ),
-
-        'algorithm': Field(
-            fields.StringField,
-            required_on_activate=False,
-            filter_ops=(wrappers.FILTER_EQ,),
-            description="Metadata provided by a user or system for "
-                        "informational purposes."
-        ),
-
-        'bit_length': Field(
-            fields.IntegerField,
-            required_on_activate=False,
-            sortable=True,
-            validators=[validators.MinNumberSize(1)],
-            description="Metadata provided by a user or system for "
-                        "informational purposes. "
-                        "Value must be greater than zero."
-        ),
-
-        'mode': Field(
-            fields.StringField,
-            required_on_activate=False,
-            filter_ops=(wrappers.FILTER_EQ,),
-            description="Metadata provided by a user or system for "
-                        "informational purposes."),
-    }
diff --git a/glare/objects/tosca_template.py b/glare/objects/tosca_template.py
deleted file mode 100644
index 8b28570..0000000
--- a/glare/objects/tosca_template.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 OpenStack Foundation
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from oslo_versionedobjects import fields
-
-from glare.objects import base
-from glare.objects.meta import wrappers
-
-Field = wrappers.Field.init
-Blob = wrappers.BlobField.init
-
-
-class TOSCATemplate(base.BaseArtifact):
-
-    fields = {
-        'template_format': Field(fields.StringField,
-                                 description="TOSCA template format."),
-        'template': Blob(description="TOSCA template body.")
-    }
-
-    @classmethod
-    def get_type_name(cls):
-        return "tosca_templates"
diff --git a/glare/opts.py b/glare/opts.py
deleted file mode 100644
index f861708..0000000
--- a/glare/opts.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright (c) 2014 OpenStack Foundation.
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -__all__ = [ - 'list_artifacts_opts' -] - -import copy -import itertools - -from osprofiler import opts as profiler - -import glare.api.middleware.context -import glare.api.middleware.keycloak_auth -import glare.api.v1.resource -import glare.api.versions -import glare.common.config -import glare.common.wsgi -import glare.notification -import glare.objects.base -from glare.objects.meta import registry -import glare.scrubber - -_artifacts_opts = [ - (None, list(itertools.chain( - glare.api.middleware.context.context_opts, - glare.api.v1.resource.list_configs, - glare.api.versions.versions_opts, - glare.common.config.common_opts, - glare.common.wsgi.bind_opts, - glare.common.wsgi.eventlet_opts, - glare.common.wsgi.socket_opts, - glare.notification.notifier_opts, - glare.objects.base.global_artifact_opts, - registry.registry_options))), - profiler.list_opts()[0], - ('paste_deploy', glare.common.config.paste_deploy_opts), - ('keycloak_oidc', glare.api.middleware.keycloak_auth.keycloak_oidc_opts), - ('scrubber', - glare.scrubber.scrubber_opts + - glare.scrubber.scrubber_cmd_opts + - glare.scrubber.scrubber_cmd_cli_opts) -] - -registry.ArtifactRegistry.register_all_artifacts() -for af_type in registry.ArtifactRegistry.obj_classes().values(): - _artifacts_opts.append( - (af_type[0].get_type_name(), af_type[0].list_artifact_type_opts())) - - -def list_artifacts_opts(): - """Return a list of oslo_config options available in Glare""" - return [(g, copy.deepcopy(o)) for g, o in _artifacts_opts] diff --git a/glare/scrubber.py b/glare/scrubber.py deleted file mode 100644 index 2b34b46..0000000 --- a/glare/scrubber.py +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import eventlet -from oslo_config import cfg -from oslo_log import log as logging - -from glare.api.middleware import context -from glare.common import exception -from glare.common import store_api -from glare.db.sqlalchemy import api as db_api -from glare.i18n import _ - -LOG = logging.getLogger(__name__) - -scrubber_opts = [ - cfg.IntOpt('scrub_time', default=0, min=0, - help=_(""" -The amount of time, in seconds, to delay artifact scrubbing. -When delayed delete is turned on, an artifact is put into -``deleted`` state upon deletion until the scrubber deletes its data. -Typically, soon -after the artifact is put into ``deleted`` state, it is available -for scrubbing. 
However, scrubbing can be delayed until a later point -using this configuration option. This option denotes the time period -an artifact spends in ``deleted`` state before it is available for -scrubbing. -It is important to realize that this has storage implications. The -larger the ``scrub_time``, the longer the time to reclaim backend -storage from deleted artifacts. -Possible values: - * Any non-negative integer -Related options: - * ``delayed_delete`` -""")), - cfg.IntOpt('scrub_pool_size', default=1, min=1, - help=_(""" -The size of thread pool to be used for scrubbing artifacts. -When there are a large number of artifacts to scrub, it is -beneficial to scrub artifacts in parallel so that the scrub queue -stays in control and the backend storage is reclaimed in a timely -fashion. This configuration option denotes the maximum number of -artifacts to be scrubbed in parallel. The default value is one, -which signifies serial scrubbing. Any value above one indicates -parallel scrubbing. -Possible values: - * Any non-zero positive integer -Related options: - * ``delayed_delete`` -""")), -] - -scrubber_cmd_opts = [ - cfg.IntOpt('wakeup_time', default=300, min=0, - help=_(""" -Time interval, in seconds, between scrubber runs in daemon mode. -Scrubber can be run either as a cron job or daemon. When run as a -daemon, this configuration time specifies the time period between -two runs. When the scrubber wakes up, it fetches and scrubs all -``deleted`` artifacts that are available for scrubbing after taking -``scrub_time`` into consideration. -If the ``wakeup_time`` is set to a large number, there may be a large -number of artifacts to be scrubbed for each run. Also, this impacts -how quickly the backend storage is reclaimed. -Possible values: - * Any non-negative integer -Related options: - * ``daemon`` - * ``delayed_delete`` -""")) -] - -scrubber_cmd_cli_opts = [ - cfg.BoolOpt('daemon', - short='D', - default=False, - help=_(""" -Run scrubber as a daemon. -This boolean configuration option indicates whether scrubber should -run as a long-running process that wakes up at regular intervals to -scrub artifacts. The wake up interval can be specified using the -configuration option ``wakeup_time``. -If this configuration option is set to ``False``, which is the -default value, scrubber runs once to scrub artifacts and exits. -In this case, if the operator wishes to implement continuous -scrubbing of artifacts, scrubber needs to be scheduled as a cron job. 
-Possible values:
-    * True
-    * False
-Related options:
-    * ``wakeup_time``
-"""))
-]
-
-CONF = cfg.CONF
-CONF.register_opts(scrubber_opts, group='scrubber')
-
-
-class Daemon(object):
-    def __init__(self, wakeup_time=300, threads=100):
-        LOG.info("Starting Daemon: wakeup_time=%(wakeup_time)s "
-                 "threads=%(threads)s",
-                 {'wakeup_time': wakeup_time, 'threads': threads})
-        self.wakeup_time = wakeup_time
-        self.event = eventlet.event.Event()
-        # This pool is used for periodic instantiation of scrubber
-        self.daemon_pool = eventlet.greenpool.GreenPool(threads)
-
-    def start(self, application):
-        self._run(application)
-
-    def wait(self):
-        try:
-            self.event.wait()
-        except KeyboardInterrupt:
-            LOG.info("Daemon Shutdown on KeyboardInterrupt")
-
-    def _run(self, application):
-        LOG.debug("Running scrubber application")
-        self.daemon_pool.spawn_n(application.run, self.event)
-        eventlet.spawn_after(self.wakeup_time, self._run, application)
-        LOG.debug("Next run scheduled in %s seconds", self.wakeup_time)
-
-
-class Scrubber(object):
-    def __init__(self):
-        self.context = context.RequestContext()
-        self.context.is_admin = True
-        self.pool = eventlet.greenpool.GreenPool(
-            CONF.scrubber.scrub_pool_size)
-
-    def run(self, event=None):
-        while True:
-            artifacts = db_api._get_all(
-                context=self.context,
-                session=db_api.get_session(),
-                limit=CONF.scrubber.scrub_pool_size,
-                sort=[],
-                filters=[('status', None, 'eq', None, 'deleted')])
-            if not artifacts:
-                break
-            self.pool.imap(self._scrub_artifact, artifacts)
-
-    def _scrub_artifact(self, af):
-        # use the admin request context created in __init__
-        LOG.info("Begin scrubbing of artifact %s", af.id)
-        for blob in af.blobs:
-            if not blob.external:
-                try:
-                    store_api.delete_blob(blob.url, context=self.context)
-                except exception.NotFound:
-                    # data has already been removed
-                    pass
-        LOG.info("Blobs successfully deleted for artifact %s", af.id)
-
-        # delete the artifact itself
-        db_api.delete(self.context, af.id, db_api.get_session())
-
-        LOG.info("Artifact %s was scrubbed", af.id)
diff --git a/glare/store/__init__.py b/glare/store/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/glare/store/base_api.py b/glare/store/base_api.py
deleted file mode 100644
index c6d8eb4..0000000
--- a/glare/store/base_api.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2017 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
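Taken together, the options above describe the scrubber's two modes of operation: a one-shot cron-style run and a long-running daemon. A configuration enabling delayed delete with a parallel scrubber daemon could look like this (values are illustrative only, mirroring the functional-test configuration later in this patch):

    [DEFAULT]
    delayed_delete = True

    [scrubber]
    # reclaim blob data one hour after an artifact is deleted
    scrub_time = 3600
    # scrub up to four artifacts in parallel
    scrub_pool_size = 4
    # run as a daemon, waking up every five minutes
    daemon = True
    wakeup_time = 300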
-
-
-class BaseStoreAPI(object):
-
-    def add_to_backend(self, context, blob_id, data, verifier=None):
-        """Save data to the store and return location info
-
-        :param blob_id: id of the blob
-        :param data: file iterator
-        :param context: user context
-        :param verifier: signature verifier
-        :return: blob location uri
-        """
-        raise NotImplementedError()
-
-    def get_from_store(self, uri, context):
-        """Load a file from the store
-
-        :param uri: blob uri
-        :param context: user context
-        :return: file iterator
-        """
-        raise NotImplementedError()
-
-    def delete_from_store(self, uri, context):
-        """Delete a blob from the store
-
-        :param uri: blob uri
-        :param context: user context
-        """
-        raise NotImplementedError()
diff --git a/glare/store/database.py b/glare/store/database.py
deleted file mode 100644
index 9edbd5f..0000000
--- a/glare/store/database.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2017 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from glare.db.sqlalchemy import api as db_api
-from glare.store import base_api
-
-
-class DatabaseStoreAPI(base_api.BaseStoreAPI):
-    """Store implementation that keeps all blob data in the SQL database."""
-
-    def add_to_backend(self, blob_id, data, context, verifier=None):
-        session = db_api.get_session()
-        return db_api.save_blob_data(context, blob_id, data, session)
-
-    def get_from_store(self, uri, context):
-        session = db_api.get_session()
-        return db_api.get_blob_data(context, uri, session)
-
-    def delete_from_store(self, uri, context):
-        session = db_api.get_session()
-        return db_api.delete_blob_data(context, uri, session)
diff --git a/glare/tests/__init__.py b/glare/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/glare/tests/etc/policy.json b/glare/tests/etc/policy.json
deleted file mode 100644
index 9e26dfe..0000000
--- a/glare/tests/etc/policy.json
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
diff --git a/glare/tests/functional/__init__.py b/glare/tests/functional/__init__.py
deleted file mode 100644
index 860fb21..0000000
--- a/glare/tests/functional/__init__.py
+++ /dev/null
@@ -1,688 +0,0 @@
-# Copyright 2011 OpenStack Foundation
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Base test class for running non-stubbed tests (functional tests)
-
-The FunctionalTest class contains helper methods for starting the Glare
-server, grabbing its logs, cleaning up pid files, and spinning the server
-down.
-""" - -import atexit -import datetime -import errno -import os -import platform -import shutil -import signal -import socket -import sys -import tempfile -import time - -import eventlet -import fixtures -from oslo_log import log as logging -from oslo_serialization import jsonutils -# NOTE(jokke): simplified transition to py3, behaves like py2 xrange -from six.moves import range -import six.moves.urllib.parse as urlparse -import testtools - -from glare.api.v1 import resource -from glare.api.v1 import router -from glare.common import utils -from glare.common import wsgi -from glare.db.sqlalchemy import api as db_api -from glare import tests as glare_tests -from glare.tests import utils as test_utils - -execute, get_unused_port = test_utils.execute, test_utils.get_unused_port -tracecmd_osmap = {'Linux': 'strace', 'FreeBSD': 'truss'} - -eventlet.patcher.monkey_patch() - - -class Server(object): - """Class used to easily manage starting and stopping - a server during functional test runs. - """ - def __init__(self, test_dir, port, sock=None): - """Creates a new Server object. - - :param test_dir: The directory where all test stuff is kept. This is - passed from the FunctionalTestCase. - :param port: The port to start a server up on. - """ - self.debug = True - self.no_venv = False - self.test_dir = test_dir - self.bind_port = port - self.conf_file_name = None - self.conf_base = None - self.paste_conf_base = None - self.exec_env = None - self.deployment_flavor = '' - self.needs_database = False - self.log_file = None - self.sock = sock - self.fork_socket = True - self.process_pid = None - self.server_module = None - self.stop_kill = False - - def write_conf(self, **kwargs): - """Writes the configuration file for the server to its intended - destination. Returns the name of the configuration file and - the over-ridden config content (may be useful for populating - error messages). - """ - if not self.conf_base: - raise RuntimeError("Subclass did not populate config_base!") - - conf_override = self.__dict__.copy() - if kwargs: - conf_override.update(**kwargs) - - # A config file and paste.ini to use just for this test...we don't want - # to trample on currently-running Glare servers, now do we? - - conf_dir = os.path.join(self.test_dir, 'etc') - conf_filepath = os.path.join(conf_dir, "%s.conf" % self.server_name) - if os.path.exists(conf_filepath): - os.unlink(conf_filepath) - paste_conf_filepath = conf_filepath.replace(".conf", "-paste.ini") - if os.path.exists(paste_conf_filepath): - os.unlink(paste_conf_filepath) - test_utils.safe_mkdirs(conf_dir) - - def override_conf(filepath, overridden): - with open(filepath, 'w') as conf_file: - conf_file.write(overridden) - conf_file.flush() - return conf_file.name - - overridden_core = self.conf_base % conf_override - self.conf_file_name = override_conf(conf_filepath, overridden_core) - - overridden_paste = '' - if self.paste_conf_base: - overridden_paste = self.paste_conf_base % conf_override - override_conf(paste_conf_filepath, overridden_paste) - - overridden = ('==Core config==\n%s\n==Paste config==\n%s' % - (overridden_core, overridden_paste)) - - return self.conf_file_name, overridden - - def start(self, expect_exit=True, expected_exitcode=0, **kwargs): - """Starts the server. - - Any kwargs passed to this method will override the configuration - value in the conf file used in starting the servers. 
- """ - - # Ensure the configuration file is written - self.write_conf(**kwargs) - - self.create_database() - - cmd = ("%(server_module)s --config-file %(conf_file_name)s" - % {"server_module": self.server_module, - "conf_file_name": self.conf_file_name}) - cmd = "%s -m %s" % (sys.executable, cmd) - # close the sock and release the unused port closer to start time - if self.exec_env: - exec_env = self.exec_env.copy() - else: - exec_env = {} - pass_fds = set() - if self.sock: - if not self.fork_socket: - self.sock.close() - self.sock = None - else: - fd = os.dup(self.sock.fileno()) - exec_env[utils.GLARE_TEST_SOCKET_FD_STR] = str(fd) - pass_fds.add(fd) - self.sock.close() - - self.process_pid = test_utils.fork_exec(cmd, - logfile=os.devnull, - exec_env=exec_env, - pass_fds=pass_fds) - - self.stop_kill = not expect_exit - if self.pid_file: - with open(self.pid_file, 'w') as pf: - pf.write('%d\n' % self.process_pid) - if not expect_exit: - rc = 0 - try: - os.kill(self.process_pid, 0) - except OSError: - raise RuntimeError("The process did not start") - else: - rc = test_utils.wait_for_fork( - self.process_pid, - expected_exitcode=expected_exitcode) - # avoid an FD leak - if self.sock: - os.close(fd) - self.sock = None - return (rc, '', '') - - def reload(self, expect_exit=True, expected_exitcode=0, **kwargs): - """Start and stop the service to reload - - Any kwargs passed to this method will override the configuration - value in the conf file used in starting the servers. - """ - self.stop() - return self.start(expect_exit=expect_exit, - expected_exitcode=expected_exitcode, **kwargs) - - def create_database(self): - """Create database if required for this server""" - if self.needs_database: - conf_dir = os.path.join(self.test_dir, 'etc') - test_utils.safe_mkdirs(conf_dir) - conf_filepath = os.path.join(conf_dir, 'glare.conf') - - glare_db_env = 'GLARE_DB_TEST_SQLITE_FILE' - if glare_db_env in os.environ: - # use the empty db created and cached as a tempfile - # instead of spending the time creating a new one - db_location = os.environ[glare_db_env] - os.system('cp %s %s/tests.sqlite' - % (db_location, self.test_dir)) - else: - cmd = ('%s -m glare.cmd.db_manage --config-file %s upgrade' % - (sys.executable, conf_filepath)) - execute(cmd, no_venv=self.no_venv, exec_env=self.exec_env, - expect_exit=True) - - # copy the clean db to a temp location so that it - # can be reused for future tests - (osf, db_location) = tempfile.mkstemp() - os.close(osf) - os.system('cp %s/tests.sqlite %s' - % (self.test_dir, db_location)) - os.environ[glare_db_env] = db_location - - # cleanup the temp file when the test suite is - # complete - def _delete_cached_db(): - try: - os.remove(os.environ[glare_db_env]) - except Exception: - glare_tests.logger.exception( - "Error cleaning up the file %s" % - os.environ[glare_db_env]) - atexit.register(_delete_cached_db) - - def stop(self): - """Spin down the server.""" - if not self.process_pid: - raise Exception('why is this being called? 
%s' % self.server_name) - - if self.stop_kill: - os.kill(self.process_pid, signal.SIGTERM) - rc = test_utils.wait_for_fork(self.process_pid, raise_error=False) - return (rc, '', '') - - def dump_log(self, name): - log = logging.getLogger(name) - if not self.log_file or not os.path.exists(self.log_file): - return - with open(self.log_file, 'r') as fptr: - for line in fptr: - log.info(line.strip()) - - -class GlareServer(Server): - - """Server object that starts/stops/manages Glare server""" - - def __init__(self, test_dir, port, policy_file, delayed_delete=False, - pid_file=None, sock=None, **kwargs): - super(GlareServer, self).__init__(test_dir, port, sock=sock) - self.server_name = 'glare' - self.server_module = 'glare.cmd.api' - self.default_store = kwargs.get("default_store", "file") - self.key_file = "" - self.cert_file = "" - self.blob_dir = os.path.join(self.test_dir, "artifacts") - self.pid_file = pid_file or os.path.join(self.test_dir, "glare.pid") - self.log_file = os.path.join(self.test_dir, "glare.log") - self.delayed_delete = delayed_delete - self.workers = 1 - self.policy_file = policy_file - self.policy_default_rule = 'default' - self.disable_path = None - - self.needs_database = True - default_sql_connection = 'sqlite:////%s/tests.sqlite' % self.test_dir - self.sql_connection = os.environ.get('GLARE_TEST_SQL_CONNECTION', - default_sql_connection) - self.lock_path = self.test_dir - - self.send_identity_headers = False - self.enabled_artifact_types = '' - self.custom_artifact_types_modules = '' - - self.conf_base = """[DEFAULT] -debug = %(debug)s -default_log_levels = eventlet.wsgi.server=DEBUG -bind_host = 127.0.0.1 -bind_port = %(bind_port)s -key_file = %(key_file)s -cert_file = %(cert_file)s -log_file = %(log_file)s -delayed_delete = %(delayed_delete)s -workers = %(workers)s -lock_path = %(lock_path)s -enabled_artifact_types = %(enabled_artifact_types)s -custom_artifact_types_modules = %(custom_artifact_types_modules)s -[oslo_policy] -policy_file = %(policy_file)s -policy_default_rule = %(policy_default_rule)s -[paste_deploy] -flavor = %(deployment_flavor)s -[glance_store] -filesystem_store_datadir=%(blob_dir)s -default_store = %(default_store)s -[database] -connection = %(sql_connection)s -""" - self.paste_conf_base = """[pipeline:glare-api] -pipeline = faultwrapper versionnegotiation trustedauth glarev1api - -[pipeline:glare-api-noauth] -pipeline = faultwrapper versionnegotiation context glarev1api - -[app:glarev1api] -paste.app_factory = - glare.tests.functional:TestRouter.factory - -[filter:faultwrapper] -paste.filter_factory = - glare.api.middleware.fault:GlareFaultWrapperFilter.factory - -[filter:versionnegotiation] -paste.filter_factory = - glare.api.middleware.version_negotiation: - GlareVersionNegotiationFilter.factory - -[filter:context] -paste.filter_factory = glare.api.middleware.context:ContextMiddleware.factory - -[filter:trustedauth] -paste.filter_factory = - glare.api.middleware.context:TrustedAuthMiddleware.factory -""" - - -class ScrubberDaemon(Server): - """ - Server object that starts/stops/manages the Scrubber server - """ - - def __init__(self, test_dir, policy_file, daemon=False, **kwargs): - # NOTE(jkoelker): Set the port to 0 since we actually don't listen - super(ScrubberDaemon, self).__init__(test_dir, 0) - self.server_name = 'scrubber' - self.server_module = 'glare.cmd.%s' % self.server_name - self.daemon = daemon - - self.blob_dir = os.path.join(self.test_dir, "artifacts") - self.scrub_time = 5 - self.pid_file = os.path.join(self.test_dir, 
"scrubber.pid") - self.log_file = os.path.join(self.test_dir, "scrubber.log") - self.lock_path = self.test_dir - - default_sql_connection = 'sqlite:////%s/tests.sqlite' % self.test_dir - self.sql_connection = os.environ.get('GLARE_TEST_SQL_CONNECTION', - default_sql_connection) - self.policy_file = policy_file - self.policy_default_rule = 'default' - - self.conf_base = """[DEFAULT] -debug = %(debug)s -log_file = %(log_file)s -[scrubber] -daemon = %(daemon)s -wakeup_time = 2 -scrub_time = %(scrub_time)s -[glance_store] -filesystem_store_datadir=%(blob_dir)s -[oslo_policy] -policy_file = %(policy_file)s -policy_default_rule = %(policy_default_rule)s -[database] -connection = %(sql_connection)s -idle_timeout = 3600 -""" - - def start(self, expect_exit=True, expected_exitcode=0, **kwargs): - if 'daemon' in kwargs: - expect_exit = False - return super(ScrubberDaemon, self).start( - expect_exit=expect_exit, - expected_exitcode=expected_exitcode, - **kwargs) - - -class FunctionalTest(test_utils.BaseTestCase): - - """Base test class for any test that wants to test the actual - servers and clients and not just the stubbed out interfaces - """ - - inited = False - disabled = False - launched_servers = [] - - def setUp(self): - super(FunctionalTest, self).setUp() - self.test_dir = self.useFixture(fixtures.TempDir()).path - - self.api_protocol = 'http' - self.glare_port, glare_sock = test_utils.get_unused_port_and_socket() - - self.include_scrubber = False - - self.tracecmd = tracecmd_osmap.get(platform.system()) - - conf_dir = os.path.join(self.test_dir, 'etc') - test_utils.safe_mkdirs(conf_dir) - self.copy_data_file('policy.json', conf_dir) - self.policy_file = os.path.join(conf_dir, 'policy.json') - - self.glare_server = GlareServer(self.test_dir, - self.glare_port, - self.policy_file, - sock=glare_sock) - - self.scrubber_daemon = ScrubberDaemon(self.test_dir, self.policy_file) - - self.pid_files = [self.glare_server.pid_file, - self.scrubber_daemon.pid_file] - self.files_to_destroy = [] - self.launched_servers = [] - - def tearDown(self): - if not self.disabled: - self.cleanup() - # We destroy the test data store between each test case, - # and recreate it, which ensures that we have no side-effects - # from the tests - self._reset_database(self.glare_server.sql_connection) - super(FunctionalTest, self).tearDown() - - self.glare_server.dump_log('glare_server') - self.scrubber_daemon.dump_log('scrubber_daemon') - - def set_policy_rules(self, rules): - with open(self.policy_file, 'w') as fap: - fap.write(jsonutils.dumps(rules)) - - def _reset_database(self, conn_string): - conn_pieces = urlparse.urlparse(conn_string) - if conn_string.startswith('sqlite'): - # We leave behind the sqlite DB for failing tests to aid - # in diagnosis, as the file size is relatively small and - # won't interfere with subsequent tests as it's in a per- - # test directory (which is blown-away if the test is green) - pass - elif conn_string.startswith('mysql'): - # We can execute the MySQL client to destroy and re-create - # the MYSQL database, which is easier and less error-prone - # than using SQLAlchemy to do this via MetaData...trust me. 
- database = conn_pieces.path.strip('/') - loc_pieces = conn_pieces.netloc.split('@') - host = loc_pieces[1] - auth_pieces = loc_pieces[0].split(':') - user = auth_pieces[0] - password = "" - if len(auth_pieces) > 1: - if auth_pieces[1].strip(): - password = "-p%s" % auth_pieces[1] - sql = ("drop database if exists %(database)s; " - "create database %(database)s;") % {'database': database} - cmd = ("mysql -u%(user)s %(password)s -h%(host)s " - "-e\"%(sql)s\"") % {'user': user, 'password': password, - 'host': host, 'sql': sql} - exitcode, out, err = execute(cmd) - self.assertEqual(0, exitcode) - - def cleanup(self): - """Makes sure anything we created or started up in the - tests are destroyed or spun down - """ - - # NOTE(jbresnah) call stop on each of the servers instead of - # checking the pid file. stop() will wait until the child - # server is dead. This eliminates the possibility of a race - # between a child process listening on a port actually dying - # and a new process being started - servers = [self.glare_server, - self.scrubber_daemon] - for s in servers: - try: - s.stop() - except Exception: - pass - - for f in self.files_to_destroy: - if os.path.exists(f): - os.unlink(f) - - def start_server(self, - server, - expect_launch, - expect_exit=True, - expected_exitcode=0, - **kwargs): - """Starts a server on an unused port. - - Any kwargs passed to this method will override the configuration - value in the conf file used in starting the server. - - :param server: the server to launch - :param expect_launch: true iff the server is expected to - successfully start - :param expect_exit: true iff the launched process is expected - to exit in a timely fashion - :param expected_exitcode: expected exitcode from the launcher - """ - self.cleanup() - - # Start up the requested server - exitcode, out, err = server.start(expect_exit=expect_exit, - expected_exitcode=expected_exitcode, - **kwargs) - if expect_exit: - self.assertEqual(expected_exitcode, exitcode, - "Failed to spin up the requested server. " - "Got: %s" % err) - - self.launched_servers.append(server) - - launch_msg = self.wait_for_servers([server], expect_launch) - self.assertTrue(launch_msg is None, launch_msg) - - def start_with_retry(self, server, port_name, max_retries, - expect_launch=True, - **kwargs): - """Starts a server, with retries if the server launches but - fails to start listening on the expected port. - - :param server: the server to launch - :param port_name: the name of the port attribute - :param max_retries: the maximum number of attempts - :param expect_launch: true iff the server is expected to - successfully start - :param expect_exit: true iff the launched process is expected - to exit in a timely fashion - """ - launch_msg = None - for i in range(max_retries): - exitcode, out, err = server.start(expect_exit=not expect_launch, - **kwargs) - name = server.server_name - self.assertEqual(0, exitcode, - "Failed to spin up the %s server. " - "Got: %s" % (name, err)) - launch_msg = self.wait_for_servers([server], expect_launch) - if launch_msg: - server.stop() - server.bind_port = get_unused_port() - setattr(self, port_name, server.bind_port) - else: - self.launched_servers.append(server) - break - self.assertTrue(launch_msg is None, launch_msg) - - def start_servers(self, **kwargs): - """Starts the Glare server on unused port. - - Any kwargs passed to this method will override the configuration - value in the conf file used in starting the servers. 
- """ - self.cleanup() - - self.start_with_retry(self.glare_server, 'glare_port', 3, **kwargs) - - if self.include_scrubber: - exitcode, out, err = self.scrubber_daemon.start(**kwargs) - self.assertEqual(0, exitcode, - "Failed to spin up the Scrubber daemon. " - "Got: %s" % err) - - def ping_server(self, port): - """Simple ping on the port. If responsive, return True, else - return False. - - :note We use raw sockets, not ping here, since ping uses ICMP and - has no concept of ports... - """ - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - try: - s.connect(("127.0.0.1", port)) - return True - except socket.error: - return False - finally: - s.close() - - def wait_for_servers(self, servers, expect_launch=True, timeout=30): - """Tight loop, waiting for the given server port(s) to be available. - Returns when all are pingable. There is a timeout on waiting - for the servers to come up. - - :param servers: Glare server ports to ping - :param expect_launch: Optional, true iff the server(s) are - expected to successfully start - :param timeout: Optional, defaults to 30 seconds - :returns: None if launch expectation is met, otherwise an - assertion message - """ - now = datetime.datetime.now() - timeout_time = now + datetime.timedelta(seconds=timeout) - replied = [] - while (timeout_time > now): - pinged = 0 - for server in servers: - if self.ping_server(server.bind_port): - pinged += 1 - if server not in replied: - replied.append(server) - if pinged == len(servers): - msg = 'Unexpected server launch status' - return None if expect_launch else msg - now = datetime.datetime.now() - time.sleep(0.05) - - failed = list(set(servers) - set(replied)) - msg = 'Unexpected server launch status for: ' - for f in failed: - msg += ('%s, ' % f.server_name) - if os.path.exists(f.pid_file): - pid = f.process_pid - trace = f.pid_file.replace('.pid', '.trace') - if self.tracecmd: - cmd = '%s -p %d -o %s' % (self.tracecmd, pid, trace) - try: - execute(cmd, raise_error=False, expect_exit=False) - except OSError as e: - if e.errno == errno.ENOENT: - raise RuntimeError('No executable found for "%s" ' - 'command.' % self.tracecmd) - else: - raise - time.sleep(0.5) - if os.path.exists(trace): - msg += ('\n%s:\n%s\n' % (self.tracecmd, - open(trace).read())) - - self.add_log_details(failed) - - return msg if expect_launch else None - - def stop_server(self, server): - """Called to stop a single server in a normal fashion. - - :param server: the server to stop - """ - # Spin down the requested server - server.stop() - - def stop_servers(self): - self.stop_server(self.glare_server) - - if self.include_scrubber: - self.stop_server(self.scrubber_daemon) - - self._reset_database(self.glare_server.sql_connection) - - def run_sql_cmd(self, sql): - """Provides a crude mechanism to run manual SQL commands - for backend DB verification within the functional tests. - The raw result set is returned. 
- """ - engine = db_api.get_engine() - return engine.execute(sql) - - def copy_data_file(self, file_name, dst_dir): - src_file_name = os.path.join('glare/tests/etc', file_name) - shutil.copy(src_file_name, dst_dir) - dst_file_name = os.path.join(dst_dir, file_name) - return dst_file_name - - def add_log_details(self, servers=None): - logs = [s.log_file for s in (servers or self.launched_servers)] - for log in logs: - if os.path.exists(log): - testtools.content.attach_file(self, log) - - -class TestRouter(router.API): - def _get_artifacts_resource(self): - deserializer = resource.RequestDeserializer() - serializer = resource.ResponseSerializer() - controller = resource.ArtifactsController() - return wsgi.Resource(controller, deserializer, serializer) diff --git a/glare/tests/functional/base.py b/glare/tests/functional/base.py deleted file mode 100644 index af54f84..0000000 --- a/glare/tests/functional/base.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -from oslo_serialization import jsonutils -from oslo_utils import uuidutils -import requests - -from glare.tests import functional - - -def sort_results(lst, target='name'): - return sorted(lst, key=lambda x: x[target]) - - -class TestArtifact(functional.FunctionalTest): - enabled_types = (u'sample_artifact', u'images', u'heat_templates', - u'heat_environments', u'tosca_templates', - u'murano_packages', u'all') - - users = { - 'user1': { - 'id': uuidutils.generate_uuid(), - 'tenant_id': uuidutils.generate_uuid(), - 'token': uuidutils.generate_uuid(), - 'role': 'member' - }, - 'user2': { - 'id': uuidutils.generate_uuid(), - 'tenant_id': uuidutils.generate_uuid(), - 'token': uuidutils.generate_uuid(), - 'role': 'member' - }, - 'admin': { - 'id': uuidutils.generate_uuid(), - 'tenant_id': uuidutils.generate_uuid(), - 'token': uuidutils.generate_uuid(), - 'role': 'admin' - }, - 'anonymous': { - 'id': None, - 'tenant_id': None, - 'token': None, - 'role': None - } - } - - def setUp(self): - super(TestArtifact, self).setUp() - - self.set_user('user1') - self.glare_server.deployment_flavor = 'noauth' - - self.glare_server.enabled_artifact_types = ','.join( - self.enabled_types) - self.glare_server.custom_artifact_types_modules = ( - 'glare.tests.sample_artifact') - self.start_servers(**self.__dict__.copy()) - - def tearDown(self): - self.stop_servers() - self._reset_database(self.glare_server.sql_connection) - super(TestArtifact, self).tearDown() - - def _url(self, path): - if 'schemas' in path: - return 'http://127.0.0.1:%d%s' % (self.glare_port, path) - else: - return 'http://127.0.0.1:%d/artifacts%s' % (self.glare_port, path) - - def set_user(self, username): - if username not in self.users: - raise KeyError - self.current_user = username - - def _headers(self, custom_headers=None): - base_headers = { - 'X-Identity-Status': 'Confirmed', - 'X-Auth-Token': self.users[self.current_user]['token'], - 'X-User-Id': self.users[self.current_user]['id'], - 'X-Tenant-Id': 
self.users[self.current_user]['tenant_id'], - 'X-Project-Id': self.users[self.current_user]['tenant_id'], - 'X-Roles': self.users[self.current_user]['role'], - } - base_headers.update(custom_headers or {}) - return base_headers - - def create_artifact(self, data=None, status=201, - type_name='sample_artifact'): - return self.post('/' + type_name, data or {}, status=status) - - def _check_artifact_method(self, method, url, data=None, status=200, - headers=None): - if not headers: - headers = self._headers() - else: - headers = self._headers(headers) - headers.setdefault("Content-Type", "application/json") - if 'application/json' in headers['Content-Type'] and data is not None: - data = jsonutils.dumps(data) - response = getattr(requests, method)(self._url(url), headers=headers, - data=data) - self.assertEqual(status, response.status_code, response.text) - if status >= 400: - return response.text - if ("application/json" in response.headers["content-type"] or - "application/schema+json" in response.headers["content-type"]): - return jsonutils.loads(response.text) - return response.text - - def post(self, url, data=None, status=201, headers=None): - return self._check_artifact_method("post", url, data, status=status, - headers=headers) - - def get(self, url, status=200, headers=None): - return self._check_artifact_method("get", url, status=status, - headers=headers) - - def delete(self, url, status=204): - response = requests.delete(self._url(url), headers=self._headers()) - self.assertEqual(status, response.status_code, response.text) - return response.text - - def patch(self, url, data, status=200, headers=None): - if headers is None: - headers = {} - if 'Content-Type' not in headers: - headers.update({'Content-Type': 'application/json-patch+json'}) - return self._check_artifact_method("patch", url, data, status=status, - headers=headers) - - def put(self, url, data=None, status=200, headers=None): - return self._check_artifact_method("put", url, data, status=status, - headers=headers) - - # the test cases below are written in accordance with use cases - # each test tries to cover separate use case in Glare - # all code inside each test tries to cover all operators and data - # involved in use case execution - # each tests represents part of artifact lifecycle - # so we can easily define where is the failed code - - make_active = [{"op": "replace", "path": "/status", "value": "active"}] - make_deactivated = [{"op": "replace", "path": "/status", - "value": "deactivated"}] - make_public = [{"op": "replace", "path": "/visibility", "value": "public"}] - - def admin_action(self, artifact_id, body, status=200, - type_name='sample_artifact'): - cur_user = self.current_user - self.set_user('admin') - url = '/%s/%s' % (type_name, artifact_id) - af = self.patch(url=url, data=body, status=status) - self.set_user(cur_user) - return af diff --git a/glare/tests/functional/test_all.py b/glare/tests/functional/test_all.py deleted file mode 100644 index e27af72..0000000 --- a/glare/tests/functional/test_all.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from glare.tests.functional import base - - -class TestAll(base.TestArtifact): - - def test_all(self): - for type_name in self.enabled_types: - if type_name == 'all': - continue - for i in range(3): - for j in range(3): - self.create_artifact( - data={'name': '%s_%d' % (type_name, i), - 'version': '%d' % j, - 'tags': ['tag%s' % i]}, - type_name=type_name) - - # get all possible artifacts - url = '/all?sort=name:asc&limit=100' - res = self.get(url=url, status=200)['all'] - from pprint import pformat - self.assertEqual(54, len(res), pformat(res)) - - # get artifacts with latest versions - url = '/all?version=latest&sort=name:asc' - res = self.get(url=url, status=200)['all'] - self.assertEqual(18, len(res)) - for art in res: - self.assertEqual('2.0.0', art['version']) - - # get images only - url = '/all?type_name=images&sort=name:asc' - res = self.get(url=url, status=200)['all'] - self.assertEqual(9, len(res)) - for art in res: - self.assertEqual('images', art['type_name']) - - # get images and heat_templates - url = '/all?type_name=in:images,heat_templates&sort=name:asc' - res = self.get(url=url, status=200)['all'] - self.assertEqual(18, len(res)) - for art in res: - self.assertIn(art['type_name'], ('images', 'heat_templates')) - - def test_all_readonlyness(self): - self.create_artifact(data={'name': 'all'}, type_name='all', status=403) - art = self.create_artifact(data={'name': 'image'}, type_name='images') - - url = '/all/%s' % art['id'] - - # update 'all' is forbidden - data = [{ - "op": "replace", - "path": "/description", - "value": "text" - }] - self.patch(url=url, data=data, status=403) - - # activation is forbidden - data = [{ - "op": "replace", - "path": "/status", - "value": "active" - }] - self.patch(url=url, data=data, status=403) - - # publishing is forbidden - data = [{ - "op": "replace", - "path": "/visibility", - "value": "public" - }] - self.patch(url=url, data=data, status=403) - - # get is okay - new_art = self.get(url=url) - self.assertEqual(new_art['id'], art['id']) diff --git a/glare/tests/functional/test_database_store.py b/glare/tests/functional/test_database_store.py deleted file mode 100644 index 9ac3f78..0000000 --- a/glare/tests/functional/test_database_store.py +++ /dev/null @@ -1,172 +0,0 @@ -# Copyright 2017 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
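The database-store tests below drive the same REST flow as the earlier functional tests: create a draft artifact, upload blob data, read it back, and activate the artifact. Outside the test harness, the equivalent calls with the requests library would look roughly like this (the host, port, and omitted auth headers are assumptions, not taken from the repo):

    import requests

    # assumed endpoint; a real deployment also needs identity headers
    base = 'http://127.0.0.1:9494/artifacts'

    # create a draft artifact
    art = requests.post(
        base + '/sample_artifact',
        json={'name': 'test', 'version': '1.0',
              'string_required': '123'}).json()
    url = '%s/sample_artifact/%s' % (base, art['id'])

    # upload blob data, then download it again
    requests.put(url + '/blob', data=b'data' * 100,
                 headers={'Content-Type': 'application/octet-stream'})
    print(requests.get(url + '/blob').content)

    # activate the artifact with a json-patch request
    requests.patch(
        url, json=[{'op': 'replace', 'path': '/status', 'value': 'active'}],
        headers={'Content-Type': 'application/json-patch+json'})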
- -import hashlib -import requests - -from glare.tests.functional import base - - -class TestMultiStore(base.TestArtifact): - enabled_types = (u'sample_artifact:database',) - - def test_blob_dicts(self): - # Getting empty artifact list - url = '/sample_artifact' - response = self.get(url=url, status=200) - expected = {'first': '/artifacts/sample_artifact', - 'sample_artifact': [], - 'schema': '/schemas/sample_artifact'} - self.assertEqual(expected, response) - - # Create a test artifact - art = self.create_artifact(status=201, - data={'name': 'test', - 'version': '1.0', - 'string_required': '123'}) - self.assertIsNotNone(art['id']) - - # Get the artifact which should have a generated id and status - # 'drafted' - url = '/sample_artifact/%s' % art['id'] - art_1 = self.get(url=url, status=200) - self.assertIsNotNone(art_1['id']) - self.assertEqual('drafted', art_1['status']) - - # Upload data to blob dict - headers = {'Content-Type': 'application/octet-stream'} - data = "data" * 100 - - self.put(url=url + '/dict_of_blobs/new_blob', - data=data, status=200, headers=headers) - - # Download data from blob dict - self.assertEqual(data, - self.get(url=url + '/dict_of_blobs/new_blob', - status=200)) - - # download blob from undefined dict property - self.get(url=url + '/not_a_dict/not_a_blob', status=400) - - def test_blob_upload(self): - # create artifact with blob - data = 'data' - self.create_artifact( - data={'name': 'test_af', 'blob': data, - 'version': '0.0.1'}, status=400) - art = self.create_artifact(data={'name': 'test_af', - 'version': '0.0.1', - 'string_required': 'test'}) - url = '/sample_artifact/%s' % art['id'] - headers = {'Content-Type': 'application/octet-stream'} - - # upload to non-existing property - self.put(url=url + '/blob_non_exist', data=data, status=400, - headers=headers) - - # upload too big value - big_data = "this is the smallest big data" - self.put(url=url + '/small_blob', data=big_data, status=413, - headers=headers) - # upload correct blob value - self.put(url=url + '/small_blob', data=big_data[:2], headers=headers) - - # Upload artifact via different user - self.set_user('user2') - self.put(url=url + '/blob', data=data, status=404, - headers=headers) - - # Upload file to the artifact - self.set_user('user1') - art = self.put(url=url + '/blob', data=data, status=200, - headers=headers) - self.assertEqual('active', art['blob']['status']) - self.assertEqual('application/octet-stream', - art['blob']['content_type']) - self.assertIn('url', art['blob']) - self.assertNotIn('id', art['blob']) - - # reUpload file to artifact - self.put(url=url + '/blob', data=data, status=409, - headers=headers) - # upload blob dict - self.put(url + '/dict_of_blobs/test_key', data=data, headers=headers) - # test re-upload failed - self.put(url + '/dict_of_blobs/test_key', data=data, headers=headers, - status=409) - - # upload few other blobs to the dict - for elem in ('aaa', 'bbb', 'ccc', 'ddd'): - self.put(url + '/dict_of_blobs/' + elem, data=data, - headers=headers) - - # upload to active artifact - self.patch(url, self.make_active) - self.put(url + '/dict_of_blobs/key2', data=data, status=403, - headers=headers) - - self.delete(url) - - def test_blob_download(self): - data = 'some_arbitrary_testing_data' - art = self.create_artifact(data={'name': 'test_af', - 'version': '0.0.1'}) - url = '/sample_artifact/%s' % art['id'] - - # download not uploaded blob - self.get(url=url + '/blob', status=404) - - # download blob from not existing artifact - self.get(url=url + '1/blob', status=404) - 
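As an aside, the md5/sha1/sha256 values asserted in these blob tests can be recomputed for any payload with the standard library alone; this small helper is illustrative, not part of the suite.

import hashlib

def blob_checksums(data):
    # the three digests Glare reports for an uploaded blob
    if isinstance(data, str):
        data = data.encode('utf-8')
    return {alg: hashlib.new(alg, data).hexdigest()
            for alg in ('md5', 'sha1', 'sha256')}

# blob_checksums('some_arbitrary_testing_data')['md5'] reproduces the
# '0825587c...' value checked against the Content-MD5 header below.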
- # download blob from undefined property - self.get(url=url + '/not_a_blob', status=400) - - headers = {'Content-Type': 'application/octet-stream'} - art = self.put(url=url + '/blob', data=data, status=200, - headers=headers) - self.assertEqual('active', art['blob']['status']) - md5 = hashlib.md5(data.encode('UTF-8')).hexdigest() - sha1 = hashlib.sha1(data.encode('UTF-8')).hexdigest() - sha256 = hashlib.sha256(data.encode('UTF-8')).hexdigest() - self.assertEqual(md5, art['blob']['md5']) - self.assertEqual(sha1, art['blob']['sha1']) - self.assertEqual(sha256, art['blob']['sha256']) - - # check that content-length is in response - response = requests.get(self._url(url + '/blob'), - headers=self._headers()) - self.assertEqual('27', response.headers["content-length"]) - - # check that all checksums are in response - self.assertEqual('0825587cc011b7e76381b65e19d5ec27', - response.headers["Content-MD5"]) - self.assertEqual('89eb4b969b721ba8c3aff18ad7d69454f651a697', - response.headers["X-Openstack-Glare-Content-SHA1"]) - self.assertEqual('bbfd48c7ec792fc462e58232d4d9f407' - 'ecefb75cc9e9823336166556b499ea4d', - response.headers["X-Openstack-Glare-Content-SHA256"]) - - blob_data = self.get(url=url + '/blob') - self.assertEqual(data, blob_data) - - # download artifact via admin - self.set_user('admin') - blob_data = self.get(url=url + '/blob') - self.assertEqual(data, blob_data) - - # try to download blob via different user - self.set_user('user2') - self.get(url=url + '/blob', status=404) diff --git a/glare/tests/functional/test_sample_artifact.py b/glare/tests/functional/test_sample_artifact.py deleted file mode 100644 index 2669744..0000000 --- a/glare/tests/functional/test_sample_artifact.py +++ /dev/null @@ -1,2310 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
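Before the sample-artifact tests begin, a hedged sketch of the pagination contract they rely on: listings return a 'next' link carrying the marker, which a client can follow until it disappears. The base URL and headers are assumptions for illustration.

import requests

BASE = 'http://127.0.0.1:9494'  # assumed Glare endpoint

def iter_artifacts(type_name='sample_artifact', limit=2, headers=None):
    # walk GET /artifacts/<type>?limit=N page by page via 'next' links
    url = '/artifacts/%s?limit=%d' % (type_name, limit)
    while url:
        page = requests.get(BASE + url, headers=headers or {}).json()
        for art in page[type_name]:
            yield art
        url = page.get('next')  # absent on the last page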
- -import hashlib -import uuid - -from oslo_serialization import jsonutils -import requests - -from glare.tests.functional import base - - -def sort_results(lst, target='name'): - return sorted(lst, key=lambda x: x[target]) - - -class TestList(base.TestArtifact): - def test_list_marker_and_limit(self): - # Create artifacts - art_list = [self.create_artifact({'name': 'name%s' % i, - 'version': '1.0', - 'tags': ['tag%s' % i], - 'int1': 1024 + i, - 'float1': 123.456, - 'str1': 'bugaga', - 'bool1': True}) - for i in range(5)] - - # sort by 'next' url - url = '/sample_artifact?limit=1&sort=int1:asc,name:desc' - result = self.get(url=url) - self.assertEqual([art_list[0]], result['sample_artifact']) - marker = result['next'] - result = self.get(url=marker[10:]) - self.assertEqual([art_list[1]], result['sample_artifact']) - - # sort by custom marker - url = '/sample_artifact?sort=int1:asc&marker=%s' % art_list[1]['id'] - result = self.get(url=url) - self.assertEqual(art_list[2:], result['sample_artifact']) - url = '/sample_artifact?sort=int1:desc&marker=%s' % art_list[1]['id'] - result = self.get(url=url) - self.assertEqual(art_list[:1], result['sample_artifact']) - url = '/sample_artifact' \ - '?sort=float1:asc,name:desc&marker=%s' % art_list[1]['id'] - result = self.get(url=url) - self.assertEqual([art_list[0]], result['sample_artifact']) - - # paginate by name in desc order with limit 2 - url = '/sample_artifact?limit=2&sort=name:desc' - result = self.get(url=url) - self.assertEqual(art_list[4:2:-1], result['sample_artifact']) - marker = result['next'] - result = self.get(url=marker[10:]) - self.assertEqual(art_list[2:0:-1], result['sample_artifact']) - marker = result['next'] - result = self.get(url=marker[10:]) - self.assertEqual([art_list[0]], result['sample_artifact']) - - def test_list_base_filters(self): - # Create artifact - art_list = [self.create_artifact({'name': 'name%s' % i, - 'version': '1.0', - 'tags': ['tag%s' % i], - 'int1': 1024, - 'float1': 123.456, - 'str1': 'bugaga', - 'bool1': True}) - for i in range(5)] - - public_art = self.create_artifact({'name': 'name5', - 'version': '1.0', - 'tags': ['tag4', 'tag5'], - 'int1': 2048, - 'float1': 987.654, - 'str1': 'lalala', - 'bool1': False, - 'string_required': '123'}) - url = '/sample_artifact/%s' % public_art['id'] - data = [{ - "op": "replace", - "path": "/status", - "value": "active" - }] - self.patch(url=url, data=data, status=200) - public_art = self.admin_action(public_art['id'], self.make_public) - - art_list.append(public_art) - - art_list.sort(key=lambda x: x['name']) - - url = '/sample_artifact?str1=bla:empty' - self.get(url=url, status=400) - - url = '/sample_artifact?str1=bla:empty' - self.get(url=url, status=400) - - url = '/sample_artifact?name=name0' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual([art_list[0]], result) - - url = '/sample_artifact?tags=tag4' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[4:], result) - - url = '/sample_artifact?name=eq:name0' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:1], result) - - url = '/sample_artifact?str1=eq:bugaga' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?int1=eq:2048' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[5:], result) - - url = '/sample_artifact?float1=eq:123.456' - result = 
sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?name=neq:name0' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[1:], result) - - url = '/sample_artifact?name=in:name,name0' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:1], result) - - url = '/sample_artifact?name=in:not_exist,name0' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:1], result) - - url = '/sample_artifact?name=not_exist' - result = self.get(url=url)['sample_artifact'] - self.assertEqual([], result) - - url = '/sample_artifact?name=bla:name1' - self.get(url=url, status=400) - - url = '/sample_artifact?name=' - self.get(url=url, status=400) - - url = '/sample_artifact?name=eq:' - self.get(url=url, status=400) - - url = '/sample_artifact?tags=tag4,tag5' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[5:], result) - - url = '/sample_artifact?tags-any=tag4' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[4:], result) - - url = '/sample_artifact?tags=tag4,tag_not_exist,tag5' - result = self.get(url=url)['sample_artifact'] - self.assertEqual([], result) - - url = '/sample_artifact?tags-any=tag4,tag_not_exist,tag5' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[4:], result) - - url = '/sample_artifact?tags=tag_not_exist,tag_not_exist_1' - result = self.get(url=url)['sample_artifact'] - self.assertEqual([], result) - - url = '/sample_artifact?tags' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list, result) - - url = '/sample_artifact?tags=' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list, result) - - url = '/sample_artifact?tags=eq:tag0' - self.get(url=url, status=400) - - url = '/sample_artifact?tags=bla:tag0' - self.get(url=url, status=400) - - url = '/sample_artifact?tags=neq:tag1' - self.get(url=url, status=400) - - url = '/sample_artifact?visibility=private' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?visibility=public' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[5:], result) - - url = '/sample_artifact?visibility=eq:private' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?visibility=eq:public' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[5:], result) - - url = '/sample_artifact?visibility=neq:private' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[5:], result) - - url = '/sample_artifact?visibility=neq:public' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?visibility=blabla' - self.get(url=url, status=400) - - url = '/sample_artifact?visibility=neq:blabla' - self.get(url=url, status=400) - - url = '/sample_artifact?name=eq:name0&name=name1&tags=tag1' - result = self.get(url=url)['sample_artifact'] - self.assertEqual([], result) - - url = '/sample_artifact?int1=gt:2000' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[5:], result) - - url = '/sample_artifact?int1=lte:1024' - result = 
sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?int1=gt:1000&int1=lt:2000' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?int1=lt:2000' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?float1=gt:200.000' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[5:], result) - - url = '/sample_artifact?float1=gt:100.00&float1=lt:200.00' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?float1=lt:200.00' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?float1=lt:200' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?float1=lte:123.456' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?bool1=True' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:5], result) - - url = '/sample_artifact?bool1=False' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[5:], result) - - def test_artifact_list_dict_filters(self): - lists_of_str = [ - ['aaa', 'bbb', 'ccc'], - ['aaa', 'bbb'], - ['aaa', 'ddd'], - ['bbb'], - ['ccc'] - ] - dicts_of_str = [ - {'aaa': 'z', 'bbb': 'z', 'ccc': 'z'}, - {'aaa': 'z', 'bbb': 'z'}, - {'aaa': 'z', 'ddd': 'z'}, - {'bbb': 'z'}, - {'ccc': 'z'} - ] - art_list = [self.create_artifact({'name': 'name%s' % i, - 'version': '1.0', - 'tags': ['tag%s' % i], - 'int1': 1024, - 'float1': 123.456, - 'str1': 'bugaga', - 'bool1': True, - 'list_of_str': lists_of_str[i], - 'dict_of_str': dicts_of_str[i]}) - for i in range(5)] - - # test list filters - url = '/sample_artifact?list_of_str=aaa&sort=name' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:3], result) - - url = '/sample_artifact?list_of_str=ccc&sort=name' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual([art_list[0], art_list[4]], result) - - url = '/sample_artifact?list_of_str=eee&sort=name' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual([], result) - - # test dict filters - url = '/sample_artifact?dict_of_str=aaa&sort=name' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:3], result) - - url = '/sample_artifact?dict_of_str=ccc&sort=name' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual([art_list[0], art_list[4]], result) - - url = '/sample_artifact?dict_of_str=eee&sort=name' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual([], result) - - def test_list_dict_prop_filters(self): - # Create artifact - art_list = [self.create_artifact({'name': 'name0', - 'version': '1.0', - 'dict_of_str': {'pr1': 'val1'}}), - self.create_artifact({'name': 'name1', - 'version': '1.0', - 'dict_of_str': {'pr1': 'val1', - 'pr2': 'val2'}}), - self.create_artifact({'name': 'name2', - 'version': '1.0', - 'dict_of_str': {'pr3': 'val3'}}), - self.create_artifact({'name': 'name3', - 'version': '1.0', - 'dict_of_str': {'pr3': 'val1'}, - 'dict_of_int': {"1": 10, "2": 20}}), - 
self.create_artifact({'name': 'name4', - 'version': '1.0', - 'dict_of_str': {}, - 'dict_of_int': {"2": 20, "3": 30}}), - ] - - art_list.sort(key=lambda x: x['name']) - - url = '/sample_artifact?dict_of_str.pr1=val1' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:2], result) - - url = '/sample_artifact?dict_of_int.1=10' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[3:4], result) - - url = '/sample_artifact?dict_of_str.pr1=val999' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual([], result) - - url = '/sample_artifact?dict_of_str.pr1=eq:val1' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual(art_list[:2], result) - - url = '/sample_artifact?dict_of_str.' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual([], result) - - for op in ['gt', 'gte', 'lt', 'lte']: - url = '/sample_artifact?dict_of_str.pr3=%s:val3' % op - self.get(url=url, status=400) - - url = '/sample_artifact?dict_of_str.pr3=blabla:val3' - self.get(url=url, status=400) - - url = '/sample_artifact?dict_of_str.pr1=' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual([], result) - - url = '/sample_artifact?dict_of_str' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual([], result) - - url = '/sample_artifact?list_of_str.pr3=blabla:val3' - self.get(url=url, status=400) - - url = '/sample_artifact?dict_of_str.bla=val1' - result = sort_results(self.get(url=url)['sample_artifact']) - self.assertEqual([], result) - - url = '/sample_artifact?dict_of_int.1=lala' - self.get(url=url, status=400) - - def test_list_sorted(self): - art_list = [self.create_artifact({'name': 'name%s' % i, - 'version': '1.0', - 'tags': ['tag%s' % i], - 'int1': i, - 'float1': 123.456 + (-0.9) ** i, - 'str1': 'bugaga', - 'bool1': True, - 'list_of_int': [11, 22, - i], - 'dict_of_int': {'one': 4 * i, - 'two': (-2) ** i}}) - for i in range(5)] - - # sorted by string 'asc' - url = '/sample_artifact?sort=name:asc' - result = self.get(url=url) - expected = sort_results(art_list) - self.assertEqual(expected, result['sample_artifact']) - - # sorted by string 'desc' - url = '/sample_artifact?sort=name:desc' - result = self.get(url=url) - expected = sort_results(art_list) - expected.reverse() - self.assertEqual(expected, result['sample_artifact']) - - # sorted by int 'asc' - url = '/sample_artifact?sort=int1:asc' - result = self.get(url=url) - expected = sort_results(art_list, target='int1') - self.assertEqual(expected, result['sample_artifact']) - - # sorted by int 'desc' - url = '/sample_artifact?sort=int1:desc' - result = self.get(url=url) - expected = sort_results(art_list, target='int1') - expected.reverse() - self.assertEqual(expected, result['sample_artifact']) - - # sorted by float 'asc' - url = '/sample_artifact?sort=float1:asc' - result = self.get(url=url) - expected = sort_results(art_list, target='float1') - self.assertEqual(expected, result['sample_artifact']) - - # sorted by float 'desc' - url = '/sample_artifact?sort=float1:desc' - result = self.get(url=url) - expected = sort_results(art_list, target='float1') - expected.reverse() - self.assertEqual(expected, result['sample_artifact']) - - # sorted by unsorted 'asc' - url = '/sample_artifact?sort=bool1:asc' -
self.get(url=url, status=400) - - # sorted by unsorted 'desc' - url = '/sample_artifact?sort=bool1:desc' - self.get(url=url, status=400) - - # sorted by non-existent 'asc' - url = '/sample_artifact?sort=non_existent:asc' - self.get(url=url, status=400) - - # sorted by non-existent 'desc' - url = '/sample_artifact?sort=non_existent:desc' - self.get(url=url, status=400) - - # sorted by invalid op - url = '/sample_artifact?sort=name:invalid_op' - self.get(url=url, status=400) - - # sorted without op - url = '/sample_artifact?sort=name' - result = self.get(url=url) - expected = sort_results(art_list) - expected.reverse() - self.assertEqual(expected, result['sample_artifact']) - - # sorted by list - url = '/sample_artifact?sort=list_of_int:asc' - self.get(url=url, status=400) - - # sorted by dict - url = '/sample_artifact?sort=dict_of_int:asc' - self.get(url=url, status=400) - - # sorted by element of dict - url = '/sample_artifact?sort=dict_of_int.one:asc' - self.get(url=url, status=400) - - # sorted by any prop - url = '/sample_artifact?sort=name:asc,int1:desc' - result = self.get(url=url) - expected = sort_results(sort_results(art_list), target='int1') - self.assertEqual(expected, result['sample_artifact']) - - def test_list_versions(self): - # Create artifacts with versions - version_list = ['1.0', '1.1', '2.0.0', '2.0.1-beta', '2.0.1', '20.0'] - - # Create artifact - art_list = [self.create_artifact({'name': 'name', - 'version': version_list[i - 1], - 'tags': ['tag%s' % i], - 'int1': 2048, - 'float1': 123.456, - 'str1': 'bugaga', - 'bool1': True}) - for i in range(1, 7)] - - public_art = self.create_artifact( - {'name': 'name', - 'tags': ['tag4', 'tag5'], - 'int1': 1024, - 'float1': 987.654, - 'str1': 'lalala', - 'bool1': False, - 'string_required': '123'}) - url = '/sample_artifact/%s' % public_art['id'] - data = [{ - "op": "replace", - "path": "/status", - "value": "active" - }] - self.patch(url=url, data=data, status=200) - public_art = self.admin_action(public_art['id'], self.make_public) - - art_list.insert(0, public_art) - - expected_result = sort_results(art_list, target='version') - url = '/sample_artifact' - result = sort_results(self.get(url=url)['sample_artifact'], - target='version') - self.assertEqual(expected_result, result) - - # Creating an artifact with existing version fails - self.create_artifact( - {'name': 'name', - 'version': '1.0', - 'tags': ['tag1'], - 'int1': 2048, - 'float1': 123.456, - 'str1': 'bugaga', - 'bool1': True}, - status=409) - - url = '/sample_artifact?name=name&version=gte:2.0.0' - result = sort_results(self.get(url=url)['sample_artifact'], - target='version') - self.assertEqual(expected_result[3:], result) - - url = ('/sample_artifact?' 
- 'name=name&version=gte:1.1&version=lt:2.0.1-beta') - result = sort_results(self.get(url=url)['sample_artifact'], - target='version') - self.assertEqual(expected_result[2:4], result) - - # Filtering by version without name is ok - url = '/sample_artifact?version=gte:2.0.0' - self.get(url=url, status=200) - - # Several name filters with version is ok - url = '/sample_artifact?name=name&name=anothername&version=gte:2.0.0' - self.get(url=url, status=200) - - # Filtering by version with name filter op different from 'eq' - url = '/sample_artifact?version=gte:2.0.0&name=neq:name' - self.get(url=url, status=200) - - # Sorting by version 'asc' - url = '/sample_artifact?name=name&sort=version:asc' - result = self.get(url=url)['sample_artifact'] - self.assertEqual(art_list, result) - - # Sorting by version 'desc' - url = '/sample_artifact?name=name&sort=version:desc' - result = self.get(url=url)['sample_artifact'] - self.assertEqual(list(reversed(art_list)), result) - - def test_list_latest_filter(self): - # Create artifacts with versions - group1_versions = ['1.0', '20.0', '2.0.0', '2.0.1-beta', '2.0.1'] - group2_versions = ['1', '1000.0.1-beta', '99.0', - '1000.0.1-alpha', '1000.0.1'] - - for i in range(5): - self.create_artifact( - {'name': 'group1', - 'version': group1_versions[i], - 'tags': ['tag%s' % i], - 'int1': 2048 + i, - 'float1': 123.456, - 'str1': 'bugaga', - "string_required": "test_str", - 'bool1': True}) - self.create_artifact( - {'name': 'group2', - 'version': group2_versions[i], - 'tags': ['tag%s' % i], - 'int1': 2048 + i, - 'float1': 123.456, - 'str1': 'bugaga', - "string_required": "test_str", - 'bool1': True}) - - url = '/sample_artifact?version=latest&sort=name:asc' - res = self.get(url=url, status=200)['sample_artifact'] - self.assertEqual(2, len(res)) - self.assertEqual('20.0.0', res[0]['version']) - self.assertEqual('1000.0.1', res[1]['version']) - - self.patch('/sample_artifact/' + res[0]['id'], self.make_active) - - url = '/sample_artifact?version=latest&sort=name:asc&status=drafted' - res = self.get(url=url, status=200)['sample_artifact'] - self.assertEqual(2, len(res)) - self.assertEqual('2.0.1', res[0]['version']) - self.assertEqual('1000.0.1', res[1]['version']) - - url = '/sample_artifact?version=latest&sort=name:asc&int1=2050' - res = self.get(url=url, status=200)['sample_artifact'] - self.assertEqual(2, len(res)) - self.assertEqual('2.0.0', res[0]['version']) - self.assertEqual('99.0.0', res[1]['version']) - - url = '/sample_artifact?version=latest&name=group1' - res = self.get(url=url, status=200)['sample_artifact'] - self.assertEqual(1, len(res)) - self.assertEqual('20.0.0', res[0]['version']) - - url = '/sample_artifact?version=latest&name=group2' - res = self.get(url=url, status=200)['sample_artifact'] - self.assertEqual(1, len(res)) - self.assertEqual('1000.0.1', res[0]['version']) - - def test_list_support_unicode_filters(self): - unicode_text = u'\u041f\u0420\u0418\u0412\u0415\u0422' - art1 = self.create_artifact(data={'name': unicode_text}) - self.assertEqual(unicode_text, art1['name']) - - mixed_text = u'la\u041f' - art2 = self.create_artifact(data={'name': mixed_text}) - self.assertEqual(mixed_text, art2['name']) - - headers = {'Content-Type': 'text/html; charset=UTF-8'} - url = u'/sample_artifact?name=\u041f\u0420\u0418\u0412\u0415\u0422' - response_url = u'/artifacts/sample_artifact?name=' \ - u'%D0%9F%D0%A0%D0%98%D0%92%D0%95%D0%A2' - result = self.get(url=url, headers=headers) - self.assertEqual(art1, result['sample_artifact'][0]) - 
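The percent-encoded 'first' link verified here can be reproduced with the standard library alone; a tiny illustration (the URL prefix is taken from the assertion above):

from urllib.parse import quote

name = u'\u041f\u0420\u0418\u0412\u0415\u0422'
print('/artifacts/sample_artifact?name=' + quote(name))
# -> /artifacts/sample_artifact?name=%D0%9F%D0%A0%D0%98%D0%92%D0%95%D0%A2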
self.assertEqual(response_url, result['first']) - - -class TestBlobs(base.TestArtifact): - def test_blob_dicts(self): - # Getting empty artifact list - url = '/sample_artifact' - response = self.get(url=url, status=200) - expected = {'first': '/artifacts/sample_artifact', - 'sample_artifact': [], - 'schema': '/schemas/sample_artifact'} - self.assertEqual(expected, response) - - # Create a test artifact - art = self.create_artifact(status=201, - data={'name': 'test', - 'version': '1.0', - 'string_required': '123'}) - self.assertIsNotNone(art['id']) - - # Get the artifact which should have a generated id and status - # 'drafted' - url = '/sample_artifact/%s' % art['id'] - art_1 = self.get(url=url, status=200) - self.assertIsNotNone(art_1['id']) - self.assertEqual('drafted', art_1['status']) - - # Upload data to blob dict - headers = {'Content-Type': 'application/octet-stream'} - data = "data" * 100 - blob_name = 'blob_name' * 100 - self.put(url=url + '/dict_of_blobs/' + blob_name, - data=data, status=200, headers=headers) - - # Download data from blob dict - self.assertEqual(data, - self.get(url=url + '/dict_of_blobs/' + blob_name, - status=200)) - - # Download blob from undefined dict property - self.get(url=url + '/not_a_dict/not_a_blob', status=400) - - # Blob url is generated right - art = self.get(url=url, status=200) - exp_blob_url = '/artifacts' + url + '/dict_of_blobs/' + blob_name - self.assertEqual(exp_blob_url, - art['dict_of_blobs'][blob_name]['url']) - - def test_blob_upload(self): - # create artifact with blob - data = 'data' - self.create_artifact( - data={'name': 'test_af', 'blob': data, - 'version': '0.0.1'}, status=400) - art = self.create_artifact(data={'name': 'test_af', - 'version': '0.0.1', - 'string_required': 'test'}) - url = '/sample_artifact/%s' % art['id'] - headers = {'Content-Type': 'application/octet-stream'} - - # upload to non-existing property - self.put(url=url + '/blob_non_exist', data=data, status=400, - headers=headers) - - # upload too big value - big_data = "this is the smallest big data" - self.put(url=url + '/small_blob', data=big_data, status=413, - headers=headers) - # upload correct blob value - self.put(url=url + '/small_blob', data=big_data[:2], headers=headers) - - # Upload artifact via different user - self.set_user('user2') - self.put(url=url + '/blob', data=data, status=404, - headers=headers) - - # Upload file to the artifact - self.set_user('user1') - art = self.put(url=url + '/blob', data=data, status=200, - headers=headers) - self.assertEqual('active', art['blob']['status']) - self.assertEqual('application/octet-stream', - art['blob']['content_type']) - self.assertIn('url', art['blob']) - self.assertNotIn('id', art['blob']) - - # Blob url is generated right - exp_blob_url = '/artifacts' + url + '/blob' - self.assertEqual(exp_blob_url, art['blob']['url']) - - # reUpload file to artifact - self.put(url=url + '/blob', data=data, status=409, - headers=headers) - # upload blob dict - self.put(url + '/dict_of_blobs/test_key', data=data, headers=headers) - # test re-upload failed - self.put(url + '/dict_of_blobs/test_key', data=data, headers=headers, - status=409) - - # upload few other blobs to the dict - for elem in ('aaa', 'bbb', 'ccc', 'ddd'): - self.put(url + '/dict_of_blobs/' + elem, data=data, - headers=headers) - - # upload to active artifact - self.patch(url, self.make_active) - self.put(url + '/dict_of_blobs/key2', data=data, status=403, - headers=headers) - - self.delete(url) - - def test_blob_download(self): - data = 
'some_arbitrary_testing_data' - art = self.create_artifact(data={'name': 'test_af', - 'version': '0.0.1'}) - url = '/sample_artifact/%s' % art['id'] - - # download not uploaded blob - self.get(url=url + '/blob', status=404) - - # download blob from not existing artifact - self.get(url=url + '1/blob', status=404) - - # download blob from undefined property - self.get(url=url + '/not_a_blob', status=400) - - headers = {'Content-Type': 'application/octet-stream'} - art = self.put(url=url + '/blob', data=data, status=200, - headers=headers) - self.assertEqual('active', art['blob']['status']) - md5 = hashlib.md5(data.encode('UTF-8')).hexdigest() - sha1 = hashlib.sha1(data.encode('UTF-8')).hexdigest() - sha256 = hashlib.sha256(data.encode('UTF-8')).hexdigest() - self.assertEqual(md5, art['blob']['md5']) - self.assertEqual(sha1, art['blob']['sha1']) - self.assertEqual(sha256, art['blob']['sha256']) - - # check that content-length is in response - response = requests.get(self._url(url + '/blob'), - headers=self._headers()) - self.assertEqual('27', response.headers["content-length"]) - - # check that all checksums are in response - response = requests.get(self._url(url + '/blob'), - headers=self._headers()) - self.assertEqual('0825587cc011b7e76381b65e19d5ec27', - response.headers["Content-MD5"]) - self.assertEqual('89eb4b969b721ba8c3aff18ad7d69454f651a697', - response.headers["X-Openstack-Glare-Content-SHA1"]) - self.assertEqual('bbfd48c7ec792fc462e58232d4d9f407' - 'ecefb75cc9e9823336166556b499ea4d', - response.headers["X-Openstack-Glare-Content-SHA256"]) - - blob_data = self.get(url=url + '/blob') - self.assertEqual(data, blob_data) - - # download artifact via admin - self.set_user('admin') - blob_data = self.get(url=url + '/blob') - self.assertEqual(data, blob_data) - - # try to download blob via different user - self.set_user('user2') - self.get(url=url + '/blob', status=404) - - def test_blob_add_custom_location(self): - # Create artifact - art = self.create_artifact({'name': 'name5', - 'version': '1.0', - 'tags': ['tag1', 'tag2', 'tag3'], - 'int1': 2048, - 'float1': 987.654, - 'str1': 'lalala', - 'bool1': False, - 'string_required': '123'}) - self.assertIsNotNone(art['id']) - - # Set custom location - url = '/sample_artifact/%s' % art['id'] - body = jsonutils.dumps( - {'url': 'https://www.apache.org/licenses/LICENSE-2.0.txt', - 'md5': "fake", 'sha1': "fake_sha", "sha256": "fake_sha256"}) - headers = {'Content-Type': - 'application/vnd+openstack.glare-custom-location+json'} - self.put(url=url + '/blob', data=body, - status=200, headers=headers) - - # test re-add failed - self.put(url=url + '/blob', data=body, status=409, headers=headers) - # add to non-existing property - self.put(url=url + '/blob_non_exist', data=body, status=400, - headers=headers) - - # Get the artifact, blob property should have status 'active' - art = self.get(url=url, status=200) - self.assertEqual('active', art['blob']['status']) - self.assertEqual('fake', art['blob']['md5']) - self.assertEqual('fake_sha', art['blob']['sha1']) - self.assertEqual('fake_sha256', art['blob']['sha256']) - self.assertIsNone(art['blob']['size']) - self.assertIsNone(art['blob']['content_type']) - self.assertEqual('https://www.apache.org/licenses/LICENSE-2.0.txt', - art['blob']['url']) - self.assertNotIn('id', art['blob']) - - # Set custom location - url = '/sample_artifact/%s' % art['id'] - self.put(url=url + '/dict_of_blobs/blob', data=body, - status=200, headers=headers) - - # Get the artifact, blob property should have status 'active' - art 
= self.get(url=url, status=200) - self.assertEqual('active', art['dict_of_blobs']['blob']['status']) - self.assertIsNotNone(art['dict_of_blobs']['blob']['md5']) - self.assertIsNone(art['dict_of_blobs']['blob']['size']) - self.assertIsNone(art['dict_of_blobs']['blob']['content_type']) - self.assertEqual('https://www.apache.org/licenses/LICENSE-2.0.txt', - art['dict_of_blobs']['blob']['url']) - self.assertNotIn('id', art['dict_of_blobs']['blob']) - # test re-add failed - self.put(url=url + '/dict_of_blobs/blob', data=body, status=409, - headers=headers) - - # test request failed with non-json containment - self.put(url=url + '/dict_of_blobs/blob_incorrect', data="incorrect", - status=400, headers=headers) - - # delete the artifact - self.delete(url=url) - - -class TestTags(base.TestArtifact): - def test_tags(self): - # Create artifact - art = self.create_artifact({'name': 'name5', - 'version': '1.0', - 'tags': ['tag1', 'tag2', 'tag3'], - 'int1': 2048, - 'float1': 987.654, - 'str1': 'lalala', - 'bool1': False, - 'string_required': '123'}) - self.assertIsNotNone(art['id']) - - url = '/sample_artifact/%s' % art['id'] - data = [{ - "op": "replace", - "path": "/status", - "value": "active" - }] - art = self.patch(url=url, data=data, status=200) - self.assertEqual('active', art['status']) - art = self.admin_action(art['id'], self.make_public) - - self.assertEqual('public', art['visibility']) - # only admins can update tags for public artifacts - self.set_user("admin") - - # Check that tags created correctly - url = '/sample_artifact/%s' % art['id'] - resp = self.get(url=url, status=200) - for tag in ['tag1', 'tag2', 'tag3']: - self.assertIn(tag, resp['tags']) - - # Set new tag list to the art - body = [{"op": "replace", - "path": "/tags", - "value": ["new_tag1", "new_tag2", "new_tag3"]}] - resp = self.patch(url=url, data=body, status=200) - for tag in ['new_tag1', 'new_tag2', 'new_tag3']: - self.assertIn(tag, resp['tags']) - - # Delete all tags from the art - body = [{"op": "replace", - "path": "/tags", - "value": []}] - resp = self.patch(url=url, data=body, status=200) - self.assertEqual([], resp['tags']) - - # Set new tags as null - body = [{"op": "replace", - "path": "/tags", - "value": None}] - resp = self.patch(url=url, data=body, status=200) - self.assertEqual([], resp['tags']) - - # Get the list of tags - resp = self.get(url=url, status=200) - self.assertEqual([], resp['tags']) - - -class TestArtifactOps(base.TestArtifact): - def test_create(self): - """All tests related to artifact creation""" - # check that cannot create artifact for non-existent artifact type - self.post('/incorrect_artifact', {"name": "t"}, status=404) - # check that cannot accept non-json body - self.post('/incorrect_artifact', "incorrect_body", status=400) - # check that cannot accept incorrect content type - self.post('/sample_artifact', {"name": "t"}, status=415, - headers={"Content-Type": "application/octet-stream"}) - # check that cannot create artifact without name - self.create_artifact(data={"int1": 1024}, status=400) - # check that cannot create artifact with too long name - self.create_artifact(data={"name": "t" * 256}, status=400) - # check that cannot create artifact with empty name - self.create_artifact(data={"name": ""}, status=400) - # check that can create af without version - private_art = self.create_artifact( - data={"name": "test_af", "string_required": "test_str"}) - # check that default is set on artifact create - uuid.UUID(private_art['id']) - self.assertEqual('0.0.0', private_art['version']) - 
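A condensed, self-contained sketch of the create contract exercised here: POST a JSON body, expect 201 plus server-generated defaults. The endpoint is an assumption; the field names come from the sample_artifact type used throughout the suite.

import uuid
import requests

resp = requests.post('http://127.0.0.1:9494/artifacts/sample_artifact',
                     json={'name': 'test_af',
                           'string_required': 'test_str'})
assert resp.status_code == 201
art = resp.json()
uuid.UUID(art['id'])               # the id is a server-generated UUID
assert art['version'] == '0.0.0'   # default version is applied
assert art['status'] == 'drafted'  # artifacts start in 'drafted'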
self.assertEqual("default", private_art["system_attribute"]) - self.assertEqual(self.users['user1']['tenant_id'], - private_art['owner']) - - # check that cannot create artifact with invalid version - self.create_artifact(data={"name": "test_af", - "version": "dummy_version"}, status=400) - # check that cannot create artifact with empty version - self.create_artifact(data={"name": "test_af", - "version": ""}, status=400) - # check that cannot create artifact with too long version - self.create_artifact(data={"name": "test_af", - "version": "t" * 256}, status=400) - # check that an artifact with the same name-version cannot - # be created - self.create_artifact(data={"name": "test_af"}, status=409) - # check that we cannot create af with the same version but different - # presentation - self.create_artifact(data={"name": "test_af", "version": "0.0"}, - status=409) - # check that we can create artifact with different version and tags - new_af = self.create_artifact( - data={"name": "test_af", "version": "0.0.1", - "tags": ["tag1", "tag2"]}) - self.assertEqual({"tag1", "tag2"}, set(new_af["tags"])) - # check that we cannot create artifact with visibility - self.create_artifact(data={"name": "test_af", "version": "0.0.2", - "visibility": "private"}, status=400) - # check that we cannot create artifact with system property - self.create_artifact(data={"name": "test_af", "version": "0.0.2", - "system_attribute": "test"}, status=403) - # check that we cannot specify blob in create - self.create_artifact(data={"name": "test_af", "version": "0.0.2", - "blob": { - 'url': None, 'size': None, - 'md5': None, 'status': 'saving', - 'external': False}}, status=400) - # check that anonymous user cannot create artifact - self.set_user("anonymous") - self.create_artifact(data={"name": "test_af", "version": "0.0.2"}, - status=403) - # check that another user can create artifact - # with the same name version - self.set_user("user2") - some_af = self.create_artifact(data={"name": "test_af"}) - - # check we can create artifact with all available attributes - # (except blobs and system) - expected = { - "name": "test_big_create", - "link1": "/artifacts/sample_artifact/%s" % some_af['id'], - "bool1": True, - "int1": 2323, - "float1": 0.1, - "str1": "test", - "list_of_str": ["test"], - "list_of_int": [0], - "dict_of_str": {"test": "test"}, - "dict_of_int": {"test": 0}, - "string_mutable": "test", - "string_required": "test", - } - big_af = self.create_artifact(data=expected) - actual = {} - for k in expected: - actual[k] = big_af[k] - self.assertEqual(expected, actual) - # check that the artifact is not available to another user - url = '/sample_artifact/%s' % private_art['id'] - self.get(url, status=404) - # check we cannot create af with non-existing property - self.create_artifact(data={"name": "test_af_ne", - "non_exist": "non_exist"}, status=400) - # activate and publish artifact to check that we can create - # private artifact with the same name version - self.set_user("user1") - - self.patch(url=url, data=self.make_active) - self.admin_action(private_art['id'], self.make_public) - self.create_artifact(data={"name": "test_af", - "string_required": "test_str"}) - - def test_activate(self): - # create artifact to activate - private_art = self.create_artifact( - data={"name": "test_af", - "version": "0.0.1"}) - # cannot activate artifact without attributes required for activation - url = '/sample_artifact/%s' % private_art['id'] -
self.patch(url=url, data=self.make_active, status=403) - add_required = [{ - "op": "replace", - "path": "/string_required", - "value": "string" - }] - self.patch(url=url, data=add_required) - # can activate if body contains non-status changes - make_active_with_updates = self.make_active + [{"op": "replace", - "path": "/description", - "value": "test"}] - active_art = self.patch(url=url, data=make_active_with_updates) - private_art['status'] = 'active' - private_art['activated_at'] = active_art['activated_at'] - private_art['updated_at'] = active_art['updated_at'] - private_art['string_required'] = 'string' - private_art['description'] = 'test' - self.assertEqual(private_art, active_art) - # check that active artifact is not available for other user - self.set_user("user2") - self.get(url, status=404) - self.set_user("user1") - - # test that activate is idempotent - self.patch(url=url, data=self.make_active) - # test that activating a deleted artifact fails - self.delete(url=url) - self.patch(url=url, data=self.make_active, status=404) - - def test_publish(self): - # create artifact to publish - self.set_user('admin') - private_art = self.create_artifact( - data={"name": "test_af", "string_required": "test_str", - "version": "0.0.1"}) - - url = '/sample_artifact/%s' % private_art['id'] - # test that we cannot publish drafted artifact - self.patch(url=url, data=self.make_public, status=403) - - self.patch(url=url, data=self.make_active) - - # test that cannot publish deactivated artifact - self.patch(url, data=self.make_deactivated) - self.patch(url, data=self.make_public, status=403) - - self.patch(url=url, data=self.make_active) - - # test that visibility can be specified in the request with - # other updates - make_public_with_updates = self.make_public + [ - {"op": "replace", - "path": "/string_mutable", - "value": "test"}] - self.patch(url=url, data=make_public_with_updates) - # check public artifact - public_art = self.patch(url=url, data=self.make_public) - private_art['activated_at'] = public_art['activated_at'] - private_art['visibility'] = 'public' - private_art['status'] = 'active' - private_art['updated_at'] = public_art['updated_at'] - private_art['string_mutable'] = 'test' - self.assertEqual(private_art, public_art) - # check that the public artifact is available to a regular user - self.set_user("user1") - self.get(url) - self.set_user("admin") - # test that publishing an artifact with the same name and version fails - duplicate_art = self.create_artifact( - data={"name": "test_af", "string_required": "test_str", - "version": "0.0.1"}) - dup_url = '/sample_artifact/%s' % duplicate_art['id'] - # proceed with duplicate testing - self.patch(url=dup_url, data=self.make_active) - self.patch(url=dup_url, data=self.make_public, status=409) - - def test_delete(self): - # try to delete a non-existing artifact - url = '/sample_artifact/111111' - self.delete(url=url, status=404) - - # check that we can delete artifact with soft link - art = self.create_artifact( - data={"name": "test_af", "string_required": "test_str", - "version": "0.0.1"}) - artd = self.create_artifact( - data={"name": "test_afd", "string_required": "test_str", - "version": "0.0.1", - "link1": '/artifacts/sample_artifact/%s' % art['id']}) - - url = '/sample_artifact/%s' % artd['id'] - self.delete(url=url, status=204) - - # try to change status of artifact to deleting - url = '/sample_artifact/%s' % art['id'] - patch = [{'op': 'replace', - 'value': 'deleting', - 'path': '/status'}] - self.patch(url=url, data=patch, status=400) - - # delete artifact via different user (non admin) - self.set_user('user2') - self.delete(url=url, status=404) - - # delete artifact via admin user - self.set_user('admin') - self.delete(url=url, status=204) - - # delete public artifact via different user - self.set_user('user1') - art = self.create_artifact( - data={"name": "test_af", "string_required": "test_str", - "version": "0.0.1"}) - url = '/sample_artifact/%s' % art['id'] - self.patch(url=url, data=self.make_active) - self.admin_action(art['id'], self.make_public) - self.set_user('user2') - self.delete(url=url, status=403) - - self.set_user('user1') - self.delete(url=url, status=403) - self.set_user('admin') - self.delete(url=url) - - # delete deactivated artifact - art = self.create_artifact( - data={"name": "test_af", "string_required": "test_str", - "version": "0.0.1"}) - url = '/sample_artifact/%s' % art['id'] - self.patch(url=url, data=self.make_active) - self.patch(url=url, data=self.make_deactivated) - self.delete(url=url, status=204) - self.get(url=url, status=404) - self.assertEqual(0, len(self.get( - url='/sample_artifact')['sample_artifact'])) - - def test_deactivate(self): - # test artifact deactivate for non-active artifact - private_art = self.create_artifact( - data={"name": "test_af", "string_required": "test_str", - "version": "0.0.1"}) - url = '/sample_artifact/%s' % private_art['id'] - self.admin_action(private_art['id'], self.make_deactivated, 403) - self.patch(url, self.make_active) - self.set_user('admin') - # test can deactivate if there is something else in request - make_deactivated_with_updates = [ - {"op": "replace", - "path": "/description", - "value": "test"}] + self.make_deactivated - # test artifact deactivate success - deactivated_art = self.admin_action( - private_art['id'], make_deactivated_with_updates) - self.assertEqual("deactivated", deactivated_art["status"]) - self.assertEqual("test", deactivated_art["description"]) - # test deactivate is idempotent - self.patch(url, self.make_deactivated) - - def test_reactivate(self): - self.set_user('admin') - private_art = self.create_artifact( - data={"name": "test_af", "string_required": "test_str", - "version": "0.0.1"}) - url = '/sample_artifact/%s' % private_art['id'] - self.patch(url, self.make_active) - self.admin_action(private_art['id'], self.make_deactivated) - # test can reactivate if there is something else in request - make_reactivated_with_updates = self.make_active + [ - {"op": "replace", - "path": "/description", - "value": "test"}] - # test artifact reactivate success - reactivated_art = self.admin_action( - private_art['id'], make_reactivated_with_updates) - self.assertEqual("active", reactivated_art["status"]) - self.assertEqual("test", reactivated_art["description"]) - - -class TestUpdate(base.TestArtifact): - def test_update_artifact_before_activate(self): - """Test updates for artifact before activation""" - # create artifact to update - private_art = self.create_artifact(data={"name": "test_af"}) - url = '/sample_artifact/%s' % private_art['id'] - # check we can update artifact - change_version = [{ - "op": "replace", - "path": "/version", - "value": "0.0.2" - }] - self.patch(url=url, data=change_version) - - # wrong patch format fails with 400 error - invalid_patch = { - "op": "replace", - "path": "/version", - "value": "0.0.2" - } - self.patch(url=url, data=invalid_patch, status=400) - - # check that we cannot update af if af with - # the same name or version exists - dup_version = self.create_artifact( - data={"name": "test_af", "version": "0.0.1"}) -
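The lifecycle transitions above (activate, deactivate, reactivate, publish) are all plain JSON-patch requests; a reusable helper might look like this sketch, where the URL scheme is assumed and the content type mirrors the suite's patch() helper.

import requests

def json_patch(base_url, type_name, art_id, ops):
    # PATCH /artifacts/<type>/<id> with an application/json-patch+json body
    url = '%s/artifacts/%s/%s' % (base_url, type_name, art_id)
    headers = {'Content-Type': 'application/json-patch+json'}
    return requests.patch(url, json=ops, headers=headers)

# json_patch(BASE, 'sample_artifact', art_id,
#            [{'op': 'replace', 'path': '/status', 'value': 'active'}])
# json_patch(BASE, 'sample_artifact', art_id,
#            [{'op': 'replace', 'path': '/visibility', 'value': 'public'}])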
dupv_url = '/sample_artifact/%s' % dup_version['id'] - change_version_dup = [{ - "op": "replace", - "path": "/version", - "value": "0.0.2" - }] - self.patch(url=dupv_url, data=change_version_dup, status=409) - - dup_name = self.create_artifact(data={"name": "test_name_af", - "version": "0.0.2"}) - dupn_url = '/sample_artifact/%s' % dup_name['id'] - change_name = [{ - "op": "replace", - "path": "/name", - "value": "test_af" - }] - self.patch(url=dupn_url, data=change_name, status=409) - # check that we can update artifacts dup - # after first artifact updated name and version - change_version[0]['value'] = "0.0.3" - self.patch(url=url, data=change_version) - self.patch(url=dupn_url, data=change_name) - # check that we can update artifact dupv to target version - # also check that after deletion of artifact with the same name - # version I can update dupv - self.delete(dupn_url) - self.patch(url=dupv_url, data=change_version_dup) - # check we cannot update artifact with incorrect content-type - self.patch(url, {}, status=415, - headers={"Content-Type": "application/json"}) - # check we cannot update tags with patch - set_tags = [{ - "op": "replace", - "path": "/tags", - "value": "test_af" - }] - self.patch(url, set_tags, status=400) - # check we cannot update artifact with incorrect json-patch - self.patch(url, "incorrect json patch", status=400) - # check update is correct if there is no update - no_name_update = [{ - "op": "replace", - "path": "/name", - "value": "test_af" - }] - self.patch(url, no_name_update) - # check add new property request rejected - add_prop = [{ - "op": "add", - "path": "/string1", - "value": "test_af" - }] - self.patch(url, add_prop, 400) - # check delete property request rejected - add_prop[0]["op"] = "remove" - add_prop[0]["path"] = "/string_required" - self.patch(url, add_prop, 400) - # check we cannot update system attr with patch - system_attr = [{ - "op": "replace", - "path": "/system_attribute", - "value": "dummy" - }] - self.patch(url, system_attr, 403) - # check cannot update blob attr with patch - blob_attr = [{ - "op": "replace", - "path": "/blob", - "value": {"name": "test_af", "version": "0.0.2", - "blob": {'url': None, 'size': None, 'md5': None, - 'status': 'saving', 'external': False}}}] - self.patch(url, blob_attr, 400) - blob_attr[0]["path"] = "/dict_of_blobs/-" - blob_attr[0]["op"] = "add" - self.patch(url, blob_attr, 400) - # test update correctness for all attributes - big_update_patch = [ - {"op": "replace", "path": "/bool1", "value": True}, - {"op": "replace", "path": "/int1", "value": 2323}, - {"op": "replace", "path": "/float1", "value": 0.1}, - {"op": "replace", "path": "/str1", "value": "test"}, - {"op": "replace", "path": "/list_of_str", "value": ["test"]}, - {"op": "replace", "path": "/list_of_int", "value": [0]}, - {"op": "replace", "path": "/dict_of_str", - "value": {"test": "test"}}, - {"op": "replace", "path": "/dict_of_int", - "value": {"test": 0}}, - {"op": "replace", "path": "/string_mutable", "value": "test"}, - {"op": "replace", "path": "/string_required", "value": "test"}, - ] - upd_af = self.patch(url, big_update_patch) - for patch_item in big_update_patch: - self.assertEqual(patch_item.get("value"), - upd_af[patch_item.get("path")[1:]]) - - # check we can update private artifact - # to the same name version as public artifact - self.patch(url=url, data=self.make_active) - self.admin_action(private_art['id'], self.make_public) - self.patch(url=dupv_url, data=change_version) - - def 
test_update_after_activate_and_publish(self): - # activate artifact - private_art = self.create_artifact( - data={"name": "test_af", "string_required": "test_str", - "version": "0.0.1"}) - - url = '/sample_artifact/%s' % private_art['id'] - self.patch(url=url, data=self.make_active) - # test that immutable properties cannot be updated - upd_immutable = [{ - "op": "replace", - "path": "/name", - "value": "new_name" - }] - self.patch(url, upd_immutable, status=403) - # test that mutable properties can be updated - upd_mutable = [{ - "op": "replace", - "path": "/string_mutable", - "value": "new_value" - }] - updated_af = self.patch(url, upd_mutable) - self.assertEqual("new_value", updated_af["string_mutable"]) - # test cannot update deactivated artifact - upd_mutable[0]["value"] = "another_new_value" - self.admin_action(private_art['id'], self.make_deactivated) - # test that nobody(even admin) can publish deactivated artifact - self.set_user("admin") - self.patch(url, self.make_public, 403) - self.set_user("user1") - self.patch(url, upd_mutable, 403) - self.admin_action(private_art['id'], self.make_active) - # publish artifact - self.admin_action(private_art['id'], self.make_public) - # check we cannot update public artifact anymore - self.patch(url, upd_mutable, status=403) - self.patch(url, upd_mutable, status=403) - # check that admin can update public artifact - self.set_user("admin") - self.patch(url, upd_mutable) - - def test_update_with_validators(self): - data = {'name': 'test_af', - 'version': '0.0.1', - 'list_validators': ['a', 'b', 'c'], - 'dict_validators': {'abc': 'a', 'def': 'b'}} - art = self.create_artifact(data=data) - url = '/sample_artifact/%s' % art['id'] - - # min int_validators value is 10 - patch = [{"op": "replace", "path": "/int_validators", "value": 9}] - self.patch(url=url, data=patch, status=400) - - # max int_validators value is 20 - patch = [{"op": "replace", "path": "/int_validators", "value": 21}] - self.patch(url=url, data=patch, status=400) - - # number 15 is okay - patch = [{"op": "replace", "path": "/int_validators", "value": 15}] - self.patch(url=url, data=patch, status=200) - - # max string length is 255 - patch = [{"op": "replace", "path": "/str1", "value": 'd' * 256}] - self.patch(url=url, data=patch, status=400) - - # 'cc' is not allowed value for the string - patch = [{"op": "replace", "path": "/string_validators", - "value": 'cc'}] - self.patch(url=url, data=patch, status=400) - - # 'aa' is okay - patch = [{"op": "replace", "path": "/string_validators", - "value": 'aa'}] - self.patch(url=url, data=patch) - - # 'bb' is okay too - patch = [{"op": "replace", "path": "/string_validators", - "value": 'bb'}] - self.patch(url=url, data=patch) - - # even if 'c' * 11 is allowed value it exceeds MaxLen's 10 character - # limit - patch = [{"op": "replace", "path": "/string_validators", - "value": 'c' * 11}] - self.patch(url=url, data=patch, status=400) - - # test list has 3 elements maximum - patch = [{"op": "add", "path": "/list_validators/-", "value": 'd'}] - self.patch(url=url, data=patch, status=400) - - patch = [{"op": "replace", "path": "/list_validators", - "value": ['a', 'b', 'c', 'd']}] - self.patch(url=url, data=patch, status=400) - - # test list values are unique - patch = [{"op": "replace", "path": "/list_validators/2", "value": 'b'}] - self.patch(url=url, data=patch, status=400) - - patch = [{"op": "replace", "path": "/list_validators", - "value": ['a', 'b', 'b']}] - self.patch(url=url, data=patch, status=400) - - # regular update works - patch = 
[{"op": "replace", "path": "/list_validators/1", "value": 'd'}] - af = self.patch(url=url, data=patch) - self.assertEqual(af['list_validators'], ['a', 'd', 'c']) - - patch = [{"op": "replace", "path": "/list_validators", - "value": ['c', 'b', 'a']}] - af = self.patch(url=url, data=patch) - self.assertEqual(af['list_validators'], ['c', 'b', 'a']) - - # test adding wrong key to dict - patch = [{"op": "add", "path": "/dict_validators/aaa", "value": 'b'}] - self.patch(url=url, data=patch, status=400) - - patch = [{"op": "replace", "path": "/dict_validators", - "value": {'abc': 'a', 'def': 'b', 'aaa': 'c'}}] - self.patch(url=url, data=patch, status=400) - - # test dict has 3 elements maximum - patch = [{"op": "add", "path": "/dict_validators/ghi", "value": 'd'}] - self.patch(url=url, data=patch) - - patch = [{"op": "add", "path": "/dict_validators/jkl", "value": 'd'}] - self.patch(url=url, data=patch, status=400) - - patch = [{"op": "replace", "path": "/dict_validators", - "value": {'abc': 'a', 'def': 'b', 'ghi': 'c', 'jkl': 'd'}}] - self.patch(url=url, data=patch, status=400) - - # regular update works - patch = [{"op": "replace", "path": "/dict_validators/abc", - "value": "q"}] - af = self.patch(url=url, data=patch) - self.assertEqual(af['dict_validators'], - {'abc': 'q', 'def': 'b', 'ghi': 'd'}) - - patch = [{"op": "replace", "path": "/dict_validators", - "value": {'abc': 'l', 'def': 'x', 'ghi': 'z'}}] - af = self.patch(url=url, data=patch) - self.assertEqual(af['dict_validators'], - {'abc': 'l', 'def': 'x', 'ghi': 'z'}) - - def test_update_base_fields(self): - data = {'name': 'test_af', - 'version': '0.0.1'} - art = self.create_artifact(data=data) - url = '/sample_artifact/%s' % art['id'] - - # INT - # float to int - patch = [{"op": "replace", - "path": "/int1", - "value": 1.1}] - art = self.patch(url=url, data=patch) - self.assertEqual(1, art['int1']) - - # str(int) to int - patch = [{"op": "replace", - "path": "/int1", - "value": '2'}] - art = self.patch(url=url, data=patch) - self.assertEqual(2, art['int1']) - - # str(float) to int - patch = [{"op": "replace", - "path": "/int1", - "value": '3.0'}] - self.patch(url=url, data=patch, status=400) - - # empty str to int - patch = [{"op": "replace", - "path": "/int1", - "value": ''}] - self.patch(url=url, data=patch, status=400) - - # empty list to int - patch = [{"op": "replace", - "path": "/int1", - "value": []}] - self.patch(url=url, data=patch, status=400) - - # empty dict to int - patch = [{"op": "replace", - "path": "/int1", - "value": {}}] - self.patch(url=url, data=patch, status=400) - - # bool to int - patch = [{"op": "replace", - "path": "/int1", - "value": True}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(1, art['int1']) - - patch = [{"op": "replace", - "path": "/int1", - "value": False}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(0, art['int1']) - - # FLOAT - # int to float - patch = [{"op": "replace", - "path": "/float1", - "value": 1}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(1.0, art['float1']) - - # str(int) to float - patch = [{"op": "replace", - "path": "/float1", - "value": '2'}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(2.0, art['float1']) - - # empty list to float - patch = [{"op": "replace", - "path": "/float1", - "value": []}] - self.patch(url=url, data=patch, status=400) - - # empty dict to float - patch = [{"op": "replace", - "path": "/float1", - "value": {}}] - self.patch(url=url, data=patch, status=400) - - #
str(bool) to float - patch = [{"op": "replace", - "path": "/float1", - "value": 'True'}] - self.patch(url=url, data=patch, status=400) - - # bool to float - patch = [{"op": "replace", - "path": "/float1", - "value": True}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(1.0, art['float1']) - - # str(float) to float - patch = [{"op": "replace", - "path": "/float1", - "value": '3.0'}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(3.0, art['float1']) - - # STRING - # str to str - patch = [{"op": "replace", - "path": "/str1", - "value": '3.0'}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual('3.0', art['str1']) - - # int to str - patch = [{"op": "replace", - "path": "/str1", - "value": 1}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual('1', art['str1']) - - # float to str - patch = [{"op": "replace", - "path": "/str1", - "value": 1.0}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual('1.0', art['str1']) - - # bool to str - patch = [{"op": "replace", - "path": "/str1", - "value": True}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual('True', art['str1']) - - # empty list to str - patch = [{"op": "replace", - "path": "/str1", - "value": []}] - self.patch(url=url, data=patch, status=400) - - patch = [{"op": "replace", - "path": "/str1", - "value": {}}] - self.patch(url=url, data=patch, status=400) - - # BOOL - # int to bool - patch = [{"op": "replace", - "path": "/bool1", - "value": 1}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(True, art['bool1']) - - patch = [{"op": "replace", - "path": "/bool1", - "value": 0}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(False, art['bool1']) - - # float to bool - patch = [{"op": "replace", - "path": "/bool1", - "value": 2.1}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(False, art['bool1']) - - patch = [{"op": "replace", - "path": "/bool1", - "value": 1.1}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(False, art['bool1']) - - # string to bool - patch = [{"op": "replace", - "path": "/bool1", - "value": '1'}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(True, art['bool1']) - - patch = [{"op": "replace", - "path": "/bool1", - "value": ''}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(False, art['bool1']) - # [] to bool - patch = [{"op": "replace", - "path": "/bool1", - "value": []}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(False, art['bool1']) - - patch = [{"op": "replace", - "path": "/bool1", - "value": [1]}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(False, art['bool1']) - # {} to bool - patch = [{"op": "replace", - "path": "/bool1", - "value": {}}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(False, art['bool1']) - - patch = [{"op": "replace", - "path": "/bool1", - "value": {'1', 1}}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(False, art['bool1']) - - # LIST OF STR AND INT - # {} to list of str - patch = [{"op": "replace", - "path": "/list_of_str", - "value": {}}] - self.patch(url=url, data=patch, status=400) - - # [] to list of str - patch = [{"op": "replace", - "path": "/list_of_str", - "value": []}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual([], art['list_of_str']) - - # list of int to list of str - patch = [{"op": "replace", - 
"path": "/list_of_str", - "value": [1, 2, 3]}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(['1', '2', '3'], art['list_of_str']) - - # list of bool to list of str - patch = [{"op": "replace", - "path": "/list_of_str", - "value": [True, False, True]}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual(['True', 'False', 'True'], art['list_of_str']) - - # str to list of str - patch = [{"op": "replace", - "path": "/list_of_str", - "value": '123'}] - self.patch(url=url, data=patch, status=400) - - # int to list of str - patch = [{"op": "replace", - "path": "/list_of_str", - "value": 11}] - self.patch(url=url, data=patch, status=400) - - # bool to list of str - patch = [{"op": "replace", - "path": "/list_of_str", - "value": True}] - self.patch(url=url, data=patch, status=400) - - # Dict OF INT - # [] to dict of int - patch = [{"op": "replace", - "path": "/dict_of_int", - "value": []}] - self.patch(url=url, data=patch, status=400) - - # {} to dict of int - patch = [{"op": "replace", - "path": "/dict_of_int", - "value": {}}] - art = self.patch(url=url, data=patch, status=200) - self.assertEqual({}, art['dict_of_int']) - - # int to dict of int - patch = [{"op": "replace", - "path": "/dict_of_int", - "value": 1}] - self.patch(url=url, data=patch, status=400) - - # bool to dict of int - patch = [{"op": "replace", - "path": "/dict_of_int", - "value": True}] - self.patch(url=url, data=patch, status=400) - - # string to dict of int - patch = [{"op": "replace", - "path": "/dict_of_int", - "value": 'aaa'}] - self.patch(url=url, data=patch, status=400) - - def test_update_field_dict(self): - art1 = self.create_artifact(data={"name": "art1"}) - - # create artifact without dict prop - data = {'name': 'art_without_dict'} - result = self.post(url='/sample_artifact', status=201, data=data) - self.assertEqual({}, result['dict_of_str']) - - # create artifact with dict prop - data = {'name': 'art_with_dict', - 'dict_of_str': {'a': '1', 'b': '3'}} - result = self.post(url='/sample_artifact', status=201, data=data) - self.assertEqual({'a': '1', 'b': '3'}, result['dict_of_str']) - - # create artifact with empty dict - data = {'name': 'art_with_empty_dict', - 'dict_of_str': {}} - result = self.post(url='/sample_artifact', status=201, data=data) - self.assertEqual({}, result['dict_of_str']) - - # add element in invalid path - data = [{'op': 'add', - 'path': '/dict_of_str', - 'value': 'val1'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - # add new element - data = [{'op': 'add', - 'path': '/dict_of_str/new', - 'value': 'val1'}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual('val1', result['dict_of_str']['new']) - - # add existent element - data = [{'op': 'add', - 'path': '/dict_of_str/new', - 'value': 'val_new'}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual('val_new', result['dict_of_str']['new']) - - # add element with empty key - data = [{'op': 'add', - 'path': '/dict_of_str/', - 'value': 'val1'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - # replace element - data = [{'op': 'replace', - 'path': '/dict_of_str/new', - 'value': 'val2'}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual('val2', result['dict_of_str']['new']) - - # replace non-existent element - data = [{'op': 'replace', - 'path': 
'/dict_of_str/non_exist', - 'value': 'val2'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - # remove element - data = [{'op': 'remove', - 'path': '/dict_of_str/new', - 'value': 'val2'}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertIsNone(result['dict_of_str'].get('new')) - - # remove non-existent element - data = [{'op': 'remove', - 'path': '/dict_of_str/non_exist', - 'value': 'val2'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - # set value - data = [{'op': 'add', - 'path': '/dict_of_str', - 'value': {'key1': 'val1', 'key2': 'val2'}}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual({'key1': 'val1', 'key2': 'val2'}, - result['dict_of_str']) - - # replace value - data = [{'op': 'add', - 'path': '/dict_of_str', - 'value': {'key11': 'val1', 'key22': 'val2'}}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual({'key11': 'val1', 'key22': 'val2'}, - result['dict_of_str']) - - # remove value - data = [{'op': 'add', - 'path': '/dict_of_str', - 'value': {}}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual({}, - result['dict_of_str']) - - # set an element of the wrong non-conversion type value - data = [{'op': 'add', - 'path': '/dict_of_str/wrong_type', - 'value': [1, 2, 4]}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - # set an element of the wrong conversion type value - data = [{'op': 'add', - 'path': '/dict_of_str/wrong_type', - 'value': 1}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual('1', result['dict_of_str']['wrong_type']) - - # add element with None value - data = [{'op': 'add', - 'path': '/dict_of_blob/nane_value', - 'value': None}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - def test_update_field_list(self): - art1 = self.create_artifact(data={"name": "art1"}) - - # create artifact without list prop - data = {'name': 'art_without_list'} - result = self.post(url='/sample_artifact', status=201, data=data) - self.assertEqual([], result['list_of_str']) - - # create artifact with list prop - data = {'name': 'art_with_list', - 'list_of_str': ['a', 'b']} - result = self.post(url='/sample_artifact', status=201, data=data) - self.assertEqual(['a', 'b'], result['list_of_str']) - - # create artifact with empty list - data = {'name': 'art_with_empty_list', - 'list_of_str': []} - result = self.post(url='/sample_artifact', status=201, data=data) - self.assertEqual([], result['list_of_str']) - - # add value - data = [{'op': 'add', - 'path': '/list_of_str', - 'value': ['b', 'd']}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual(['b', 'd'], result['list_of_str']) - - # replace value - data = [{'op': 'replace', - 'path': '/list_of_str', - 'value': ['aa', 'dd']}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual(['aa', 'dd'], result['list_of_str']) - - # remove value - data = [{'op': 'add', - 'path': '/list_of_str', - 'value': []}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual([], result['list_of_str']) - - # add new element on empty list - self.assertEqual([], art1['list_of_str']) - 
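# A minimal sketch, assuming the stock `jsonpatch` library, of the
# RFC 6902 (JSON Patch) semantics that the dict and list operations in
# these tests follow. Glare applies the operations server-side, so the
# library calls here are an analogy for the expected behaviour, not the
# code under test.
import jsonpatch

doc = {'dict_of_str': {'a': '1'}, 'list_of_str': []}

# 'add' with a dict key path inserts the key, or overwrites it when it
# already exists (the 'new' -> 'val_new' case above)
doc = jsonpatch.apply_patch(
    doc, [{'op': 'add', 'path': '/dict_of_str/new', 'value': 'val1'}])
assert doc['dict_of_str']['new'] == 'val1'

# 'add' with the '-' index appends to a list
doc = jsonpatch.apply_patch(
    doc, [{'op': 'add', 'path': '/list_of_str/-', 'value': 'val1'}])
assert doc['list_of_str'] == ['val1']

# an index past the end of the list is a conflict; the HTTP 400
# responses asserted below are the API-level equivalent
try:
    jsonpatch.apply_patch(
        doc, [{'op': 'add', 'path': '/list_of_str/10', 'value': 'x'}])
except jsonpatch.JsonPatchConflict:
    pass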
data = [{'op': 'add', - 'path': '/list_of_str/-', - 'value': 'val1'}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual(['val1'], result['list_of_str']) - - # add new element on index - data = [{'op': 'add', - 'path': '/list_of_str/0', - 'value': 'val2'}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual(['val2', 'val1'], result['list_of_str']) - - # add new element on next index - data = [{'op': 'add', - 'path': '/list_of_str/1', - 'value': 'val3'}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual(['val2', 'val3', 'val1'], result['list_of_str']) - - # add new element on default index - data = [{'op': 'add', - 'path': '/list_of_str/-', - 'value': 'val4'}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual(['val2', 'val3', 'val1', 'val4'], - result['list_of_str']) - - # add new element on non-existent index - data = [{'op': 'add', - 'path': '/list_of_str/10', - 'value': 'val2'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - # replace element on index - data = [{'op': 'replace', - 'path': '/list_of_str/1', - 'value': 'val_new'}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual(['val2', 'val_new', 'val1', 'val4'], - result['list_of_str']) - - # replace element on default index - data = [{'op': 'replace', - 'path': '/list_of_str/-', - 'value': 'val-'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - # replace new element on non-existent index - data = [{'op': 'replace', - 'path': '/list_of_str/99', - 'value': 'val_new'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - # remove element on index - data = [{'op': 'remove', - 'path': '/list_of_str/1', - 'value': 'val2'}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual(['val2', 'val1', 'val4'], result['list_of_str']) - - # remove element on default index - data = [{'op': 'remove', - 'path': '/list_of_str/-', - 'value': 'val3'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - # remove new element on non-existent index - data = [{'op': 'remove', - 'path': '/list_of_str/999', - 'value': 'val2'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - def test_update_remove_properties(self): - data = { - "name": "test_big_create", - "version": "1.0.0", - "bool1": True, - "int1": 2323, - "float1": 0.1, - "str1": "test", - "list_of_str": ["test1", "test2"], - "list_of_int": [0, 1, 2], - "dict_of_str": {"test": "test"}, - "dict_of_int": {"test": 0}, - "string_mutable": "test", - "string_required": "test", - } - art1 = self.create_artifact(data=data) - - # remove the whole list of strings - data = [{'op': 'replace', - 'path': '/list_of_str', - 'value': None}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual([], result['list_of_str']) - - # remove the whole list of ints - data = [{'op': 'replace', - 'path': '/list_of_int', - 'value': None}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual([], result['list_of_int']) - - # remove the whole dict of strings - data = [{'op': 'replace', - 'path': '/dict_of_str', - 'value': 
None}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual({}, result['dict_of_str']) - - # remove the whole dict of ints - data = [{'op': 'replace', - 'path': '/dict_of_int', - 'value': None}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual({}, result['dict_of_int']) - - # remove bool1 - data = [{'op': 'replace', - 'path': '/bool1', - 'value': None}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertEqual(False, result['bool1']) - - # remove int1 - data = [{'op': 'replace', - 'path': '/int1', - 'value': None}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertIsNone(result['int1']) - - # remove float1 - data = [{'op': 'replace', - 'path': '/float1', - 'value': None}] - url = '/sample_artifact/%s' % art1['id'] - result = self.patch(url=url, data=data) - self.assertIsNone(result['float1']) - - # cannot remove id, because it's a system field - data = [{'op': 'replace', - 'path': '/id', - 'value': None}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=403) - - # cannot remove name - data = [{'op': 'replace', - 'path': '/name', - 'value': None}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - headers = {'Content-Type': 'application/octet-stream'} - self.put(url=url + '/blob', data="d" * 1000, headers=headers) - - # cannot remove blob - data = [{'op': 'replace', - 'path': '/blob', - 'value': None}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - def test_update_malformed_json_patch(self): - data = {'name': 'ttt'} - art1 = self.create_artifact(data=data) - - data = [{'op': 'replace', 'path': None, 'value': 'aaa'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'replace', 'path': '/', 'value': 'aaa'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'replace', 'path': '//', 'value': 'aaa'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'replace', 'path': 'name/', 'value': 'aaa'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'replace', 'path': '*/*', 'value': 'aaa'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'add', 'path': None, 'value': 'aaa'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'add', 'path': '/', 'value': 'aaa'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'add', 'path': '//', 'value': 'aaa'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'add', 'path': 'name/', 'value': 'aaa'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'add', 'path': '*/*', 'value': 'aaa'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'add', 'path': '/name'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'replace', 'path': None}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = 
[{'op': 'replace', 'path': '/'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'replace', 'path': '//'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'replace', 'path': 'name/'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'replace', 'path': '*/*'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - data = [{'op': 'no-op', 'path': '/name', 'value': 'aaa'}] - url = '/sample_artifact/%s' % art1['id'] - self.patch(url=url, data=data, status=400) - - -class TestLinks(base.TestArtifact): - def test_manage_links(self): - some_af = self.create_artifact(data={"name": "test_af"}) - dep_af = self.create_artifact(data={"name": "test_dep_af"}) - dep_url = "/artifacts/sample_artifact/%s" % some_af['id'] - - # set valid link - patch = [{"op": "replace", "path": "/link1", "value": dep_url}] - url = '/sample_artifact/%s' % dep_af['id'] - af = self.patch(url=url, data=patch) - self.assertEqual(af['link1'], dep_url) - - # remove link from artifact - patch = [{"op": "replace", "path": "/link1", "value": None}] - af = self.patch(url=url, data=patch) - self.assertIsNone(af['link1']) - - # try to set invalid link - patch = [{"op": "replace", "path": "/link1", "value": "Invalid"}] - self.patch(url=url, data=patch, status=400) - - # try to set link to non-existing artifact - non_exiting_url = "/artifacts/sample_artifact/%s" % uuid.uuid4() - patch = [{"op": "replace", - "path": "/link1", - "value": non_exiting_url}] - self.patch(url=url, data=patch, status=400) - - def test_manage_dict_of_links(self): - some_af = self.create_artifact(data={"name": "test_af"}) - dep_af = self.create_artifact(data={"name": "test_dep_af"}) - dep_url = "/artifacts/sample_artifact/%s" % some_af['id'] - - # set valid link - patch = [{"op": "add", - "path": "/dict_of_links/link1", - "value": dep_url}] - url = '/sample_artifact/%s' % dep_af['id'] - af = self.patch(url=url, data=patch) - self.assertEqual(af['dict_of_links']['link1'], dep_url) - - # remove link from artifact - patch = [{"op": "remove", - "path": "/dict_of_links/link1"}] - af = self.patch(url=url, data=patch) - self.assertNotIn('link1', af['dict_of_links']) - - # try to set invalid link - patch = [{"op": "replace", - "path": "/dict_of_links/link1", - "value": "Invalid"}] - self.patch(url=url, data=patch, status=400) - - # try to set link to non-existing artifact - non_exiting_url = "/artifacts/sample_artifact/%s" % uuid.uuid4() - patch = [{"op": "replace", - "path": "/dict_of_links/link1", - "value": non_exiting_url}] - self.patch(url=url, data=patch, status=400) - - def test_manage_list_of_links(self): - some_af = self.create_artifact(data={"name": "test_af"}) - dep_af = self.create_artifact(data={"name": "test_dep_af"}) - dep_url = "/artifacts/sample_artifact/%s" % some_af['id'] - - # set valid link - patch = [{"op": "add", - "path": "/list_of_links/-", - "value": dep_url}] - url = '/sample_artifact/%s' % dep_af['id'] - af = self.patch(url=url, data=patch) - self.assertEqual(af['list_of_links'][0], dep_url) - - # remove link from artifact - patch = [{"op": "remove", - "path": "/list_of_links/0"}] - af = self.patch(url=url, data=patch) - self.assertEqual(0, len(af['list_of_links'])) - - # try to set invalid link - patch = [{"op": "add", - "path": "/list_of_links/-", - "value": "Invalid"}] - self.patch(url=url, data=patch, status=400) - - # try to set 
link to non-existing artifact - non_exiting_url = "/artifacts/sample_artifact/%s" % uuid.uuid4() - patch = [{"op": "add", - "path": "/list_of_links/-", - "value": non_exiting_url}] - self.patch(url=url, data=patch, status=400) diff --git a/glare/tests/functional/test_schemas.py b/glare/tests/functional/test_schemas.py deleted file mode 100644 index 5076bd4..0000000 --- a/glare/tests/functional/test_schemas.py +++ /dev/null @@ -1,950 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import jsonschema - -from glare.tests.functional import base - -fixture_base_props = { - u'activated_at': { - u'description': u'Datetime when artifact has became active.', - u'filter_ops': [u'lt', u'gt'], - u'format': u'date-time', - u'glareType': u'DateTime', - u'readOnly': True, - u'required_on_activate': False, - u'sortable': True, - u'type': [u'string', - u'null']}, - u'created_at': { - u'description': u'Datetime when artifact has been created.', - u'filter_ops': [u'lt', u'gt'], - u'format': u'date-time', - u'glareType': u'DateTime', - u'readOnly': True, - u'sortable': True, - u'type': u'string'}, - u'description': {u'default': u'', - u'description': u'Artifact description.', - u'filter_ops': [], - u'glareType': u'String', - u'maxLength': 4096, - u'mutable': True, - u'required_on_activate': False, - u'type': [u'string', - u'null']}, - u'id': {u'description': u'Artifact UUID.', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'pattern': u'^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}' - u'-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$', - u'readOnly': True, - u'sortable': True, - u'type': u'string'}, - u'metadata': {u'additionalProperties': {u'maxLength': 255, - u'minLength': 1, - u'type': u'string'}, - u'default': {}, - u'description': u'Key-value dict with useful information ' - u'about an artifact.', - u'filter_ops': [u'eq', u'neq', u'in'], - u'glareType': u'StringDict', - u'maxProperties': 255, - u'required_on_activate': False, - u'type': [u'object', - u'null']}, - u'name': {u'description': u'Artifact Name.', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'minLength': 1, - u'required_on_activate': False, - u'sortable': True, - u'type': u'string'}, - u'owner': {u'description': u'ID of user/tenant who uploaded artifact.', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'readOnly': True, - u'required_on_activate': False, - u'sortable': True, - u'type': u'string'}, - u'status': {u'default': u'drafted', - u'description': u'Artifact status.', - u'enum': [u'drafted', - u'active', - u'deactivated', - u'deleted'], - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'mutable': True, - u'sortable': True, - u'type': u'string'}, - u'tags': {u'default': [], - u'description': u'List of tags added to Artifact.', - u'filter_ops': [], - u'glareType': u'StringList', - u'items': {u'maxLength': 255, 
- u'minLength': 1, - u'pattern': u'^[^,/]+$', - u'type': u'string'}, - u'maxItems': 255, - u'mutable': True, - u'required_on_activate': False, - u'type': [u'array', u'null'], - u'uniqueItems': True}, - u'updated_at': { - u'description': u'Datetime when artifact has been updated last time.', - u'filter_ops': [u'lt', u'gt'], - u'format': u'date-time', - u'glareType': u'DateTime', - u'mutable': True, - u'readOnly': True, - u'sortable': True, - u'type': u'string'}, - u'version': {u'default': u'0.0.0', - u'description': u'Artifact version(semver).', - u'filter_ops': [u'eq', - u'neq', - u'in', - u'gt', - u'gte', - u'lt', - u'lte'], - u'glareType': u'String', - u'pattern': u'/^([0-9]+)\\.([0-9]+)\\.([0-9]+)(?:-' - u'([0-9A-Za-z-]+(?:\\.[0-9A-Za-z-]+)*))?' - u'(?:\\+[0-9A-Za-z-]+)?$/', - u'required_on_activate': False, - u'sortable': True, - u'type': u'string'}, - u'visibility': {u'default': u'private', - u'description': u'Artifact visibility that defines if ' - u'artifact can be available to other ' - u'users.', - u'enum': [u'private', u'public'], - u'filter_ops': [u'eq', u'neq', u'in'], - u'glareType': u'String', - u'mutable': True, - u'sortable': True, - u'type': u'string'} -} - - -def generate_type_props(props): - props.update(fixture_base_props) - return props - - -fixtures = { - u'sample_artifact': { - u'name': u'sample_artifact', - u'properties': generate_type_props({ - u'blob': {u'additionalProperties': False, - u'description': u'I am Blob', - u'filter_ops': [], - u'glareType': u'Blob', - u'mutable': True, - u'properties': { - u'md5': {u'type': [u'string', u'null']}, - u'sha1': {u'type': [u'string', u'null']}, - u'sha256': {u'type': [u'string', u'null']}, - u'content_type': { - u'type': u'string'}, - u'external': { - u'type': u'boolean'}, - u'size': {u'type': [ - u'number', - u'null']}, - u'status': { - u'enum': [ - u'saving', - u'active'], - u'type': u'string'}}, - u'required': [u'size', - u'md5', u'sha1', u'sha256', - u'external', - u'status', - u'content_type'], - u'required_on_activate': False, - u'type': [u'object', - u'null']}, - u'bool1': {u'default': False, - u'filter_ops': [u'eq'], - u'glareType': u'Boolean', - u'required_on_activate': False, - u'type': [u'boolean', - u'null']}, - u'bool2': {u'default': False, - u'filter_ops': [u'eq'], - u'glareType': u'Boolean', - u'required_on_activate': False, - u'type': [u'boolean', - u'null']}, - u'link1': {u'filter_ops': [u'eq', - u'neq'], - u'glareType': u'Link', - u'required_on_activate': False, - u'type': [u'string', - u'null']}, - u'link2': {u'filter_ops': [u'eq', - u'neq'], - u'glareType': u'Link', - u'required_on_activate': False, - u'type': [u'string', - u'null']}, - u'dict_of_blobs': { - u'additionalProperties': { - u'additionalProperties': False, - u'properties': { - u'md5': {u'type': [u'string', u'null']}, - u'sha1': {u'type': [u'string', u'null']}, - u'sha256': {u'type': [u'string', u'null']}, - u'content_type': { - u'type': u'string'}, - u'external': { - u'type': u'boolean'}, - u'size': { - u'type': [ - u'number', - u'null']}, - u'status': { - u'enum': [ - u'saving', - u'active'], - u'type': u'string'}}, - u'required': [u'size', - u'md5', u'sha1', u'sha256', - u'external', - u'status', - u'content_type'], - u'type': [u'object', - u'null']}, - u'default': {}, - u'filter_ops': [], - u'glareType': u'BlobDict', - u'maxProperties': 255, - u'required_on_activate': False, - u'type': [u'object', - u'null']}, - u'dict_of_int': { - u'additionalProperties': { - u'type': u'integer'}, - u'default': {}, - u'filter_ops': [u'eq', u'in'], - 
u'glareType': u'IntegerDict', - u'maxProperties': 255, - u'required_on_activate': False, - u'type': [u'object', - u'null']}, - u'dict_of_links': {u'additionalProperties': {u'type': u'string'}, - u'default': {}, - u'filter_ops': [u'eq'], - u'glareType': u'LinkDict', - u'maxProperties': 255, - u'mutable': True, - u'required_on_activate': False, - u'type': [u'object', u'null']}, - u'list_of_links': {u'default': [], - u'filter_ops': [u'eq'], - u'glareType': u'LinkList', - u'items': {u'type': u'string'}, - u'maxItems': 255, - u'mutable': True, - u'required_on_activate': False, - u'type': [u'array', u'null']}, - u'dict_of_str': { - u'additionalProperties': {u'maxLength': 255, - u'type': u'string'}, - u'default': {}, - u'filter_ops': [u'eq', u'in'], - u'glareType': u'StringDict', - u'maxProperties': 255, - u'required_on_activate': False, - u'type': [u'object', - u'null']}, - u'dict_validators': { - u'additionalProperties': False, - u'filter_ops': [], - u'glareType': u'StringDict', - u'maxProperties': 3, - u'properties': {u'abc': {u'maxLength': 255, - u'type': [u'string', - u'null']}, - u'def': {u'maxLength': 255, - u'type': [u'string', - u'null']}, - u'ghi': {u'maxLength': 255, - u'type': [u'string', - u'null']}, - u'jkl': {u'maxLength': 255, - u'type': [u'string', - u'null']}}, - u'required_on_activate': False, - u'type': [u'object', - u'null']}, - u'float1': {u'filter_ops': [u'eq', - u'neq', - u'in', - u'gt', - u'gte', - u'lt', - u'lte'], - u'glareType': u'Float', - u'required_on_activate': False, - u'sortable': True, - u'type': [u'number', - u'null']}, - u'float2': {u'filter_ops': [u'eq', - u'neq', - u'in', - u'gt', - u'gte', - u'lt', - u'lte'], - u'glareType': u'Float', - u'required_on_activate': False, - u'sortable': True, - u'type': [u'number', - u'null']}, - u'int1': {u'filter_ops': [u'eq', - u'neq', - u'in', - u'gt', - u'gte', - u'lt', - u'lte'], - u'glareType': u'Integer', - u'required_on_activate': False, - u'sortable': True, - u'type': [u'integer', - u'null']}, - u'int2': {u'filter_ops': [u'eq', - u'neq', - u'in', - u'gt', - u'gte', - u'lt', - u'lte'], - u'glareType': u'Integer', - u'required_on_activate': False, - u'sortable': True, - u'type': [u'integer', - u'null']}, - u'int_validators': {u'filter_ops': [u'eq', - u'neq', - u'in', - u'gt', - u'gte', - u'lt', - u'lte'], - u'glareType': u'Integer', - u'maximum': 20, - u'minimum': 10, - u'required_on_activate': False, - u'type': [u'integer', - u'null']}, - u'list_of_int': {u'default': [], - u'filter_ops': [u'eq', u'in'], - u'glareType': u'IntegerList', - u'items': { - u'type': u'integer'}, - u'maxItems': 255, - u'required_on_activate': False, - u'type': [u'array', - u'null']}, - u'list_of_str': {u'default': [], - u'filter_ops': [u'eq', u'in'], - u'glareType': u'StringList', - u'items': {u'maxLength': 255, - u'type': u'string'}, - u'maxItems': 255, - u'required_on_activate': False, - u'type': [u'array', - u'null']}, - u'list_validators': {u'default': [], - u'filter_ops': [], - u'glareType': u'StringList', - u'items': {u'maxLength': 255, - u'type': u'string'}, - u'maxItems': 3, - u'required_on_activate': False, - u'type': [u'array', - u'null'], - u'uniqueItems': True}, - u'small_blob': {u'additionalProperties': False, - u'filter_ops': [], - u'glareType': u'Blob', - u'mutable': True, - u'properties': { - u'md5': {u'type': [u'string', u'null']}, - u'sha1': {u'type': [u'string', u'null']}, - u'sha256': {u'type': [u'string', u'null']}, - u'content_type': { - u'type': u'string'}, - u'external': { - u'type': u'boolean'}, - u'size': { - u'type': 
[ - u'number', - u'null']}, - u'status': { - u'enum': [ - u'saving', - u'active'], - u'type': u'string'}}, - u'required': [u'size', - u'md5', u'sha1', u'sha256', - u'external', - u'status', - u'content_type'], - u'required_on_activate': False, - u'type': [u'object', - u'null']}, - u'str1': {u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'required_on_activate': False, - u'sortable': True, - u'type': [u'string', - u'null']}, - u'string_mutable': {u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'mutable': True, - u'required_on_activate': False, - u'type': [u'string', - u'null']}, - u'string_required': { - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'type': [u'string', - u'null']}, - u'string_validators': { - u'enum': [u'aa', - u'bb', - u'ccccccccccc'], - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 10, - u'required_on_activate': False, - u'type': [u'string', - u'null']}, - u'system_attribute': {u'default': u'default', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'readOnly': True, - u'sortable': True, - u'type': [u'string', - u'null']} - }), - u'required': [u'name'], - u'title': u'Artifact type sample_artifact of version 1.0', - u'version': u'1.0', - u'type': u'object'}, - u'tosca_templates': { - u'name': u'tosca_templates', - u'properties': generate_type_props({ - u'template': { - u'additionalProperties': False, - u'description': u'TOSCA template body.', - u'filter_ops': [], - u'glareType': u'Blob', - u'properties': { - u'md5': {u'type': [u'string', u'null']}, - u'sha1': {u'type': [u'string', u'null']}, - u'sha256': {u'type': [u'string', u'null']}, - u'content_type': { - u'type': u'string'}, - u'external': {u'type': u'boolean'}, - u'size': {u'type': [u'number', - u'null']}, - u'status': {u'enum': [u'saving', - u'active'], - u'type': u'string'}}, - u'required': [u'size', - u'md5', u'sha1', u'sha256', - u'external', - u'status', - u'content_type'], - u'type': [u'object', - u'null']}, - u'template_format': {u'description': u'TOSCA template format.', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'type': [u'string', - u'null']}, - }), - u'required': [u'name'], - u'version': u'1.0', - u'title': u'Artifact type tosca_templates of version 1.0', - u'type': u'object'}, - u'murano_packages': { - u'name': u'murano_packages', - u'properties': generate_type_props({ - u'categories': { - u'default': [], - u'description': u'List of categories specified for ' - u'the package.', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'StringList', - u'items': {u'maxLength': 255, - u'type': u'string'}, - u'maxItems': 255, - u'mutable': True, - u'type': [u'array', - u'null']}, - u'class_definitions': { - u'default': [], - u'description': u'List of class definitions ' - u'in the package.', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'StringList', - u'items': {u'maxLength': 255, - u'type': u'string'}, - u'maxItems': 255, - u'type': [u'array', - u'null'], - u'uniqueItems': True}, - u'dependencies': { - u'default': [], - u'description': u'List of package dependencies for ' - u'this package.', - u'filter_ops': [u'eq', - u'neq'], - u'glareType': u'LinkList', - u'items': {u'type': u'string'}, - u'maxItems': 255, - u'required_on_activate': False, - u'type': [u'array', - u'null']}, - u'display_name': { - 
u'description': u'Package name in human-readable format.', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'mutable': True, - u'type': [u'string', - u'null']}, - u'inherits': { - u'additionalProperties': {u'maxLength': 255, - u'type': u'string'}, - u'default': {}, - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'StringDict', - u'maxProperties': 255, - u'type': [u'object', - u'null']}, - u'keywords': {u'default': [], - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'StringList', - u'items': {u'maxLength': 255, - u'type': u'string'}, - u'maxItems': 255, - u'mutable': True, - u'type': [u'array', - u'null']}, - u'package': { - u'additionalProperties': False, - u'description': u'Murano Package binary.', - u'filter_ops': [], - u'glareType': u'Blob', - u'properties': {u'md5': {u'type': [u'string', u'null']}, - u'sha1': {u'type': [u'string', u'null']}, - u'sha256': {u'type': [u'string', u'null']}, - u'content_type': {u'type': u'string'}, - u'external': {u'type': u'boolean'}, - u'size': {u'type': [u'number', - u'null']}, - u'status': {u'enum': [u'saving', - u'active'], - u'type': u'string'}}, - u'required': [u'size', - u'md5', u'sha1', u'sha256', - u'external', - u'status', - u'content_type'], - u'required_on_activate': False, - u'type': [u'object', - u'null']}, - u'type': { - u'default': u'Application', - u'description': u'Package type.', - u'enum': [u'Application', - u'Library', - ], - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'type': [u'string', - u'null']} - }), - u'required': [u'name'], - u'version': u'1.0', - u'title': u'Artifact type murano_packages of version 1.0', - u'type': u'object'}, - u'images': { - u'name': u'images', - u'properties': generate_type_props({ - u'architecture': { - u'description': u'Operating system architecture as specified ' - u'in http://docs.openstack.org/trunk/' - u'openstack-compute/admin/content/adding-' - u'images.html', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'required_on_activate': False, - u'type': [u'string', - u'null']}, - u'container_format': {u'description': u'Image container format.', - u'enum': [u'ami', - u'ari', - u'aki', - u'bare', - u'ovf', - u'ova', - u'docker', - ], - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'type': [u'string', - u'null']}, - - u'disk_format': {u'description': u'Image disk format.', - u'enum': [u'ami', - u'ari', - u'aki', - u'vhd', - u'vhdx', - u'vmdk', - u'raw', - u'qcow2', - u'vdi', - u'iso', - ], - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'type': [u'string', u'null']}, - u'image': {u'additionalProperties': False, - u'description': u'Image binary.', - u'filter_ops': [], - u'glareType': u'Blob', - u'properties': { - u'md5': {u'type': [u'string', u'null']}, - u'sha1': {u'type': [u'string', u'null']}, - u'sha256': {u'type': [u'string', u'null']}, - u'content_type': {u'type': u'string'}, - u'external': {u'type': u'boolean'}, - u'size': {u'type': [u'number', - u'null']}, - u'status': {u'enum': [u'saving', - u'active'], - u'type': u'string'}}, - u'required': [u'size', - u'md5', u'sha1', u'sha256', - u'external', - u'status', - u'content_type'], - u'required_on_activate': False, - u'type': [u'object', u'null']}, - u'instance_uuid': { - u'description': u'Metadata which can be used to record which ' - u'instance this image is associated with. 
' - u'(Informational only, does not create an ' - u'instance snapshot.)', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'required_on_activate': False, - u'type': [u'string', - u'null']}, - u'kernel_id': { - u'description': u'ID of image stored in Glare that should be ' - u'used as the kernel when booting an ' - u'AMI-style image.', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'pattern': u'^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-' - u'([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-' - u'([0-9a-fA-F]){12}$', - u'required_on_activate': False, - u'type': [u'string', u'null']}, - u'min_disk': { - u'description': u'Minimal disk space required to boot image.', - u'filter_ops': [u'eq', - u'neq', - u'in', - u'gt', - u'gte', - u'lt', - u'lte'], - u'glareType': u'Integer', - u'minimum': 0, - u'required_on_activate': False, - u'type': [u'integer', u'null']}, - u'min_ram': { - u'description': u'Minimal RAM required to boot image.', - u'filter_ops': [u'eq', - u'neq', - u'in', - u'gt', - u'gte', - u'lt', - u'lte'], - u'glareType': u'Integer', - u'minimum': 0, - u'required_on_activate': False, - u'type': [u'integer', u'null']}, - u'os_distro': { - u'description': u'Common name of operating system distribution' - u' as specified in http://docs.openstack.org/' - u'trunk/openstack-compute/admin/content/' - u'adding-images.html', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'required_on_activate': False, - u'type': [u'string', u'null']}, - u'os_version': { - u'description': u'Operating system version as specified by the' - u' distributor', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'required_on_activate': False, - u'type': [u'string', u'null']}, - u'ramdisk_id': { - u'description': u'ID of image stored in Glare that should be ' - u'used as the ramdisk when booting an ' - u'AMI-style image.', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'pattern': u'^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F])' - u'{4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$', - u'required_on_activate': False, - u'type': [u'string', u'null']}}), - u'required': [u'name'], - u'version': u'1.0', - u'title': u'Artifact type images of version 1.0', - u'type': u'object'}, - u'heat_templates': { - u'name': u'heat_templates', - u'properties': generate_type_props({ - u'default_envs': { - u'additionalProperties': {u'maxLength': 255, - u'type': u'string'}, - u'default': {}, - u'description': u'Default environments that can ' - u'be applied to the template if no ' - u'environments specified by user.', - u'filter_ops': [u'eq', - u'neq', - u'in'], - u'glareType': u'StringDict', - u'maxProperties': 255, - u'mutable': True, - u'type': [u'object', - u'null']}, - u'environments': { - u'additionalProperties': {u'type': u'string'}, - u'default': {}, - u'description': u'References to Heat Environments ' - u'that can be used with current ' - u'template.', - u'filter_ops': [u'eq', - u'neq'], - u'glareType': u'LinkDict', - u'maxProperties': 255, - u'mutable': True, - u'type': [u'object', - u'null']}, - u'nested_templates': { - u'additionalProperties': - {u'additionalProperties': False, - u'properties': { - u'md5': {u'type': [u'string', u'null']}, - u'sha1': {u'type': [u'string', u'null']}, - u'sha256': {u'type': [u'string', u'null']}, - u'content_type': { - u'type': u'string'}, - u'external': {u'type': u'boolean'}, - u'size': 
{u'type': [u'number', - u'null']}, - u'status': {u'enum': [u'saving', - u'active'], - u'type': u'string'}}, - u'required': [u'size', - u'md5', u'sha1', u'sha256', - u'external', - u'status', - u'content_type'], - u'type': [u'object', - u'null']}, - u'default': {}, - u'description': u'Dict of nested templates where key is the ' - u'name of template and value is nested ' - u'template body.', - u'filter_ops': [], - u'glareType': u'BlobDict', - u'maxProperties': 255, - u'type': [u'object', - u'null']}, - u'template': { - u'additionalProperties': False, - u'description': u'Heat template body.', - u'filter_ops': [], - u'glareType': u'Blob', - u'properties': { - u'md5': {u'type': [u'string', u'null']}, - u'sha1': {u'type': [u'string', u'null']}, - u'sha256': {u'type': [u'string', u'null']}, - u'content_type': { - u'type': u'string'}, - u'external': {u'type': u'boolean'}, - u'size': {u'type': [u'number', - u'null']}, - u'status': {u'enum': [u'saving', - u'active'], - u'type': u'string'}}, - u'required': [u'size', - u'md5', u'sha1', u'sha256', - u'external', - u'status', - u'content_type'], - u'type': [u'object', - u'null']}, - - }), - u'version': u'1.0', - u'required': [u'name'], - u'title': u'Artifact type heat_templates of version 1.0', - u'type': u'object'}, - u'heat_environments': { - u'name': u'heat_environments', - u'properties': generate_type_props({ - u'environment': { - u'additionalProperties': False, - u'description': u'Heat Environment text body.', - u'filter_ops': [], - u'glareType': u'Blob', - u'properties': {u'md5': {u'type': [u'string', u'null']}, - u'sha1': {u'type': [u'string', u'null']}, - u'sha256': {u'type': [u'string', u'null']}, - u'content_type': {u'type': u'string'}, - u'external': {u'type': u'boolean'}, - u'size': {u'type': [u'number', - u'null']}, - u'status': {u'enum': [u'saving', - u'active'], - u'type': u'string'}}, - u'required': [u'size', - u'md5', u'sha1', u'sha256', - u'external', - u'status', - u'content_type'], - u'type': [u'object', - u'null']}, - - }), - u'required': [u'name'], - u'version': u'1.0', - u'title': u'Artifact type heat_environments of version 1.0', - u'type': u'object'}, - u'all': { - u'name': u'all', - u'properties': generate_type_props({ - u'type_name': {u'description': u'Name of artifact type.', - u'filter_ops': [u'eq', u'neq', u'in'], - u'glareType': u'String', - u'maxLength': 255, - u'type': [u'string', u'null']}, - - }), - u'required': [u'name'], - u'version': u'1.0', - u'title': u'Artifact type all of version 1.0', - u'type': u'object'} -} - - -class TestSchemas(base.TestArtifact): - def test_schemas(self): - # Get schemas for specific artifact type - for at in self.enabled_types: - result = self.get(url='/schemas/%s' % at) - self.assertEqual(fixtures[at], result['schemas'][at]) - - # Get list schemas of artifacts - result = self.get(url='/schemas') - self.assertEqual(fixtures, result['schemas']) - - # Validation of schemas - result = self.get(url='/schemas')['schemas'] - for artifact_type, schema in result.items(): - jsonschema.Draft4Validator.check_schema(schema) diff --git a/glare/tests/functional/test_scrubber.py b/glare/tests/functional/test_scrubber.py deleted file mode 100644 index bffa65a..0000000 --- a/glare/tests/functional/test_scrubber.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -import time - -from oslo_serialization import jsonutils -from six.moves import range - -from glare.tests import functional -from glare.tests.functional import base -from glare.tests.utils import execute - - -class TestScrubber(base.TestArtifact): - - """Test that delayed_delete works and the scrubber deletes""" - - def setUp(self): - functional.FunctionalTest.setUp(self) - - self.include_scrubber = True - self.set_user('user1') - self.glare_server.deployment_flavor = 'noauth' - - self.glare_server.enabled_artifact_types = ','.join( - self.enabled_types) - self.glare_server.custom_artifact_types_modules = ( - 'glare.tests.sample_artifact') - - def _create_sample_artifact(self): - art = self.create_artifact({'name': 'test_art', - 'version': '1.0'}) - - url = '/sample_artifact/%s' % art['id'] - headers = {'Content-Type': 'application/octet-stream'} - - # upload data to blob - self.put(url=url + '/small_blob', data='aaaaaa', - headers=headers) - - # upload a couple of blobs to dict_of_blobs - self.put(url + '/dict_of_blobs/blob1', data='bbbb', - headers=headers) - self.put(url + '/dict_of_blobs/blob2', data='cccc', - headers=headers) - - # add external location - body = jsonutils.dumps( - {'url': 'https://www.apache.org/licenses/LICENSE-2.0.txt', - 'md5': "fake", 'sha1': "fake_sha", "sha256": "fake_sha256"}) - headers = {'Content-Type': - 'application/vnd+openstack.glare-custom-location+json'} - self.put(url=url + '/blob', data=body, status=200, - headers=headers) - - return url - - def test_scrubber_delayed_delete(self): - """ - Test that artifacts don't get deleted immediately and that the scrubber - scrubs them. - """ - self.start_servers(delayed_delete=True, daemon=True, - **self.__dict__.copy()) - - url = self._create_sample_artifact() - - # create another artifact - art2 = self.create_artifact({'name': 'test_art', 'version': '2.0'}) - - # delete sample artifact - self.delete(url=url) - art = self.get(url) - self.assertEqual('deleted', art['status']) - - self.wait_for_scrub(url) - - # check that the second artifact wasn't removed - art = self.get('/sample_artifact/%s' % art2['id']) - self.assertEqual('drafted', art['status']) - - def test_scrubber_app(self): - """ - Test that the scrubber script runs successfully when not in - daemon mode. 
- """ - self.start_servers(delayed_delete=True, - **self.__dict__.copy()) - - url = self._create_sample_artifact() - - # wait for the scrub time on the artifacts to pass - time.sleep(self.scrubber_daemon.scrub_time) - - # create another artifact - art2 = self.create_artifact({'name': 'test_art', 'version': '2.0'}) - - # delete sample artifact - self.delete(url=url) - art = self.get(url) - self.assertEqual('deleted', art['status']) - - # scrub artifacts and make sure they are deleted - exe_cmd = "%s -m glare.cmd.scrubber" % sys.executable - cmd = ("%s --config-file %s" % - (exe_cmd, self.scrubber_daemon.conf_file_name)) - exitcode, out, err = execute(cmd, raise_error=False) - self.assertEqual(0, exitcode) - - self.wait_for_scrub(url) - - # check that the second artifact wasn't removed - art = self.get('/sample_artifact/%s' % art2['id']) - self.assertEqual('drafted', art['status']) - - def wait_for_scrub(self, url): - """ - The build servers sometimes take longer than 15 seconds - to scrub. Give it up to 5 min, checking every 5 seconds. - When/if it flips to deleted, bail immediately. - """ - wait_for = 300 # seconds - check_every = 5 # seconds - for _ in range(wait_for // check_every): - time.sleep(check_every) - try: - self.get(url, status=404) - return - except Exception: - pass - else: - self.fail("Artifact wasn't scrubbed") diff --git a/glare/tests/functional/test_visibility.py b/glare/tests/functional/test_visibility.py deleted file mode 100644 index 72c0f31..0000000 --- a/glare/tests/functional/test_visibility.py +++ /dev/null @@ -1,193 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from glare.tests.functional import base - - -class TestVisibility(base.TestArtifact): - - """Test Glare artifact visibility for various users.""" - - def test_visibility_basic(self): - - self.set_user('user1') - art1 = self.create_artifact(data={'name': 'art1', 'version': 1.0}) - url = '/sample_artifact/%s' % art1['id'] - - # Artifact is visible by its owner - self.get(url=url) - - # Owner can modify the artifact - patch = [{"op": "replace", "path": "/description", "value": "dd"}] - self.patch(url=url, data=patch) - - # Artifact is not visible by another user - self.set_user('user2') - self.get(url=url, status=404) - - # Artifact is visible by admin - self.set_user('admin') - self.get(url=url) - - # Admin can update the artifact - patch = [{"op": "replace", "path": "/string_required", "value": "gg"}] - self.patch(url=url, data=patch) - - # Activate and publish the artifact - self.patch(url=url, data=self.make_active) - self.patch(url=url, data=self.make_public) - - # All users can see public artifact - - self.set_user('user1') - self.get(url=url) - - # Default policy 'update_public' forbids the owner to update public - # artifacts - patch = [{"op": "replace", "path": "/description", "value": "bb"}] - self.patch(url=url, data=patch, status=403) - - self.set_user('admin') - self.get(url=url) - - # Admin can always update public artifacts - patch = [{"op": "replace", "path": "/description", "value": "ss"}] - self.patch(url=url, data=patch) - - self.set_user('user2') - self.get(url=url) - - # Regular user cannot update public artifact - patch = [{"op": "replace", "path": "/description", "value": "aa"}] - self.patch(url=url, data=patch, status=403) - - def test_visibility_name_version(self): - self.set_user('user1') - self.create_artifact(data={'name': 'my_art', 'version': 1.0}) - - # User can't create another artifact with the same name/version - self.create_artifact(data={'name': 'my_art', 'version': 1.0}, - status=409) - - art2 = self.create_artifact(data={'name': 'your_art', 'version': 2.0}) - url = '/sample_artifact/%s' % art2['id'] - - # User can't change name and version if such artifact already exists - patch = [ - {"op": "replace", "path": "/name", "value": "my_art"}, - {"op": "replace", "path": "/version", "value": 1.0} - ] - self.patch(url=url, data=patch, status=409) - - # Another user can create an artifact with the same name/version - self.set_user("user2") - art3 = self.create_artifact(data={'name': 'my_art', 'version': 1.0}) - - # Now admin sees 2 artifacts with the same name/version - self.set_user("admin") - url = '/sample_artifact?name=my_art&version=1' - self.assertEqual(2, len(self.get(url=url)['sample_artifact'])) - - # Admin can activate and publish artifact art3 - url = '/sample_artifact/%s' % art3['id'] - patch = [{"op": "replace", "path": "/string_required", "value": "gg"}] - self.patch(url=url, data=patch) - self.patch(url=url, data=self.make_active) - self.patch(url=url, data=self.make_public) - - # After that user1 sees 2 artifacts with the same name/version as well - self.set_user("user1") - url = '/sample_artifact?name=my_art&version=1' - self.assertEqual(2, len(self.get(url=url)['sample_artifact'])) - - # User2 still sees only his public artifact - self.set_user("user2") - url = '/sample_artifact?name=my_art&version=1' - self.assertEqual(1, len(self.get(url=url)['sample_artifact'])) - - # Admin is able to create a private artifact with the same name/version - self.set_user("admin") - art4 = self.create_artifact(data={'name': 'my_art', 'version': 1.0}) - - # 
And he sees 3 artifacts - url = '/sample_artifact?name=my_art&version=1' - self.assertEqual(3, len(self.get(url=url)['sample_artifact'])) - - # But he can't publish his artifact, because this name/version already - # exists in public scope - url = '/sample_artifact/%s' % art4['id'] - patch = [{"op": "replace", "path": "/string_required", "value": "gg"}] - self.patch(url=url, data=patch) - self.patch(url=url, data=self.make_active) - self.patch(url=url, data=self.make_public, status=409) - - # Admin publishes artifact art2 - url = '/sample_artifact/%s' % art2['id'] - patch = [{"op": "replace", "path": "/string_required", "value": "gg"}] - self.patch(url=url, data=patch) - self.patch(url=url, data=self.make_active) - self.patch(url=url, data=self.make_public) - - # User2 can create his own private artifact with the same name/version - self.set_user("user2") - self.create_artifact(data={'name': 'your_art', 'version': 2.0}) - - def test_visibility_artifact_types(self): - self.set_user('user1') - self.create_artifact(data={'name': 'my_art', 'version': 1.0}, - type_name='images') - self.create_artifact(data={'name': 'my_art', 'version': 1.0}, - type_name='heat_templates') - self.create_artifact(data={'name': 'my_art', 'version': 1.0}, - type_name='heat_environments') - - def test_visibility_all(self): - self.set_user('user1') - art1 = self.create_artifact(data={'name': 'my_art', 'version': 1.0}, - type_name='images') - art2 = self.create_artifact(data={'name': 'my_art', 'version': 1.0}, - type_name='heat_templates') - # User 1 sees his 2 artifacts - url = '/all?name=my_art&version=1' - self.assertEqual(2, len(self.get(url=url)['all'])) - - self.set_user('user2') - self.create_artifact(data={'name': 'my_art', 'version': 1.0}, - type_name='images') - self.create_artifact(data={'name': 'my_art', 'version': 1.0}, - type_name='heat_templates') - # User 2 sees his 2 artifacts - url = '/all?name=my_art&version=1' - self.assertEqual(2, len(self.get(url=url)['all'])) - - # Admin sees 4 artifacts from both users - self.set_user("admin") - self.assertEqual(4, len(self.get(url=url)['all'])) - - # After publishing art1 and art2 user 2 can see 4 artifacts as well - url = '/sample_artifact/%s' % art1['id'] - patch = [{"op": "replace", "path": "/string_required", "value": "gg"}] - self.patch(url=url, data=patch) - self.patch(url=url, data=self.make_active) - self.patch(url=url, data=self.make_public) - - url = '/sample_artifact/%s' % art2['id'] - patch = [{"op": "replace", "path": "/string_required", "value": "gg"}] - self.patch(url=url, data=patch) - self.patch(url=url, data=self.make_active) - self.patch(url=url, data=self.make_public) - - self.set_user("user2") - url = '/all?name=my_art&version=1' - self.assertEqual(4, len(self.get(url=url)['all'])) diff --git a/glare/tests/hooks_artifact.py b/glare/tests/hooks_artifact.py deleted file mode 100644 index 5e18149..0000000 --- a/glare/tests/hooks_artifact.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import shutil - -from oslo_config import cfg -from oslo_log import log as logging -from oslo_versionedobjects import fields - -from glare.common import exception -from glare.objects import base -from glare.objects.meta import file_utils -from glare.objects.meta import wrappers - -Field = wrappers.Field.init -Dict = wrappers.DictField.init -List = wrappers.ListField.init -Blob = wrappers.BlobField.init -Folder = wrappers.FolderField.init - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - - -class HookChecker(base.BaseArtifact): - fields = { - 'zip': Blob(description="Original zipped data.", - required_on_activate=False), - 'content': Folder(system=True, required_on_activate=False), - 'forbid_activate': Field(fields.FlexibleBooleanField, - default=False), - 'forbid_publish': Field(fields.FlexibleBooleanField, - default=False, mutable=True), - 'forbid_download_zip': Field(fields.FlexibleBooleanField, - default=False), - 'forbid_delete': Field(fields.FlexibleBooleanField, - default=False, mutable=True), - } - - artifact_type_opts = base.BaseArtifact.artifact_type_opts + [ - cfg.BoolOpt('in_memory_processing') - ] - - @classmethod - def get_type_name(cls): - return "hooks_artifact" - - @classmethod - def _validate_upload_harddrive(cls, context, af, field_name, fd): - path = None - tdir = None - try: - tfile, path = file_utils.create_temporary_file(fd, '.zip') - tdir = file_utils.extract_zip_to_temporary_folder(tfile) - - # upload all files to 'content' folder - for subdir, dirs, files in os.walk(tdir): - for file_name in files: - path_to_file = os.path.join(subdir, file_name) - with open(path_to_file, "rb") as f: - file_utils.upload_content_file( - context, af, f, 'content', - path_to_file[len(tdir) + 1:]) - except Exception as e: - if path is not None and os.path.exists(path): - # remove temporary file if something went wrong - os.remove(path) - raise e - finally: - # remove temporary folder - if tdir is not None: - shutil.rmtree(tdir) - - tfile.flush() - tfile.seek(0) - return tfile, path - - @classmethod - def validate_upload(cls, context, af, field_name, fd): - if CONF.hooks_artifact.in_memory_processing: - return file_utils.unpack_zip_archive_in_memory( - context, af, 'content', fd), None - else: - return cls._validate_upload_harddrive( - context, af, field_name, fd) - - @classmethod - def validate_download(cls, context, af, field_name, fd): - if af.forbid_download_zip and field_name == 'zip': - raise exception.BadRequest - return fd, None - - @classmethod - def validate_activate(cls, context, af): - if af.forbid_activate: - raise exception.BadRequest - - @classmethod - def validate_publish(cls, context, af): - if af.forbid_publish: - raise exception.BadRequest - - @classmethod - def validate_delete(cls, context, af): - if af.forbid_delete: - raise exception.BadRequest diff --git a/glare/tests/sample_artifact.py b/glare/tests/sample_artifact.py deleted file mode 100644 index 72b68a6..0000000 --- a/glare/tests/sample_artifact.py +++ /dev/null @@ -1,132 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Sample artifact object for testing purposes""" - -from oslo_versionedobjects import fields - -from glare.objects import base as base_artifact -from glare.objects.meta import fields as glare_fields -from glare.objects.meta import validators -from glare.objects.meta import wrappers - -Field = wrappers.Field.init -Dict = wrappers.DictField.init -List = wrappers.ListField.init -Blob = wrappers.BlobField.init -Folder = wrappers.FolderField.init - - -class SampleArtifact(base_artifact.BaseArtifact): - VERSION = '1.0' - - fields = { - 'blob': Blob(required_on_activate=False, mutable=True, - description="I am Blob"), - 'small_blob': Blob(max_blob_size=10, required_on_activate=False, - mutable=True), - 'link1': Field(glare_fields.Link, - required_on_activate=False), - 'link2': Field(glare_fields.Link, - required_on_activate=False), - 'bool1': Field(fields.FlexibleBooleanField, - required_on_activate=False, - filter_ops=(wrappers.FILTER_EQ,), - default=False), - 'bool2': Field(fields.FlexibleBooleanField, - required_on_activate=False, - filter_ops=(wrappers.FILTER_EQ,), - default=False), - 'int1': Field(fields.IntegerField, - required_on_activate=False, - sortable=True), - 'int2': Field(fields.IntegerField, - sortable=True, - required_on_activate=False), - 'float1': Field(fields.FloatField, - sortable=True, - required_on_activate=False), - 'float2': Field(fields.FloatField, - sortable=True, - required_on_activate=False), - 'str1': Field(fields.StringField, - sortable=True, - required_on_activate=False), - 'list_of_str': List(fields.String, - required_on_activate=False, - filter_ops=(wrappers.FILTER_EQ, - wrappers.FILTER_IN)), - 'list_of_int': List(fields.Integer, - required_on_activate=False, - filter_ops=(wrappers.FILTER_EQ, - wrappers.FILTER_IN)), - 'dict_of_str': Dict(fields.String, - required_on_activate=False, - filter_ops=(wrappers.FILTER_EQ, - wrappers.FILTER_IN)), - 'dict_of_int': Dict(fields.Integer, - required_on_activate=False, - filter_ops=(wrappers.FILTER_EQ, - wrappers.FILTER_IN)), - 'dict_of_links': Dict(glare_fields.LinkFieldType, - mutable=True, - required_on_activate=False, - filter_ops=(wrappers.FILTER_EQ,)), - 'list_of_links': List(glare_fields.LinkFieldType, - mutable=True, - required_on_activate=False, - filter_ops=(wrappers.FILTER_EQ,)), - 'dict_of_blobs': Folder(required_on_activate=False, - max_folder_size=2000, - validators=[ - validators.MaxDictKeyLen(1000)]), - 'string_mutable': Field(fields.StringField, - required_on_activate=False, - mutable=True), - 'string_required': Field(fields.StringField, - required_on_activate=True), - 'string_validators': Field(fields.StringField, - required_on_activate=False, - validators=[ - validators.AllowedValues( - ['aa', 'bb', 'c' * 11]), - validators.MaxStrLen(10) - ]), - 'int_validators': Field(fields.IntegerField, - required_on_activate=False, - validators=[ - validators.MinNumberSize(10), - validators.MaxNumberSize(20) - ]), - 'list_validators': List(fields.String, - required_on_activate=False, - filter_ops=[], - max_size=3, - validators=[validators.Unique()]), - 'dict_validators': Dict(fields.String, - 
required_on_activate=False, - default=None, - filter_ops=[], - validators=[ - validators.AllowedDictKeys([ - 'abc', 'def', 'ghi', 'jkl'])], - max_size=3), - 'system_attribute': Field(fields.StringField, - system=True, sortable=True, - default="default") - } - - @classmethod - def get_type_name(cls): - return "sample_artifact" diff --git a/glare/tests/unit/__init__.py b/glare/tests/unit/__init__.py deleted file mode 100644 index e48f2cf..0000000 --- a/glare/tests/unit/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2016 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import oslo_i18n as i18n - - -def fake_translate_msgid(msgid, domain, desired_locale=None): - return msgid - -i18n.enable_lazy() - -# To ensure messages don't really get translated while running tests. -# As there are lots of places where matching is expected when comparing -# exception message(translated) with raw message. -i18n._translate_msgid = fake_translate_msgid diff --git a/glare/tests/unit/api/__init__.py b/glare/tests/unit/api/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare/tests/unit/api/test_create.py b/glare/tests/unit/api/test_create.py deleted file mode 100644 index 36b230c..0000000 --- a/glare/tests/unit/api/test_create.py +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
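The sample_artifact module above doubles as documentation of Glare's field-definition DSL: each entry in `fields` is built from the wrappers module (Field/Dict/List/Blob/Folder), with validators, filter operations and flags such as mutable, sortable and required_on_activate attached declaratively. For reference, a minimal custom type in the same style might look like the sketch below; the type itself is hypothetical, but every API used here appears in the deleted files, and it only runs with glare importable.

    from oslo_versionedobjects import fields

    from glare.objects import base as base_artifact
    from glare.objects.meta import validators
    from glare.objects.meta import wrappers

    Field = wrappers.Field.init
    Blob = wrappers.BlobField.init


    class MinimalArtifact(base_artifact.BaseArtifact):
        VERSION = '1.0'

        fields = {
            # Binary payload that must be uploaded before activation.
            'payload': Blob(required_on_activate=True,
                            description="Main artifact data."),
            # Short, sortable label limited to 10 characters.
            'label': Field(fields.StringField,
                           required_on_activate=False,
                           sortable=True,
                           validators=[validators.MaxStrLen(10)]),
        }

        @classmethod
        def get_type_name(cls):
            return "minimal_artifact"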
- -from glare.common import exception as exc -from glare.tests.unit import base - - -class TestArtifactCreate(base.BaseTestArtifactAPI): - - """Test Glare artifact creation.""" - - def test_create_artifact_minimal(self): - - for name in ['ttt', 'tt:t', 'tt t', 'tt: t', 'tt,t']: - values = {'name': name} - - res = self.controller.create(self.req, 'sample_artifact', values) - self.assertEqual(name, res['name']) - self.assertEqual('0.0.0', res['version']) - self.assertEqual(self.users['user1']['tenant_id'], res['owner']) - self.assertEqual('drafted', res['status']) - self.assertEqual('private', res['visibility']) - self.assertEqual('', res['description']) - self.assertEqual({}, res['metadata']) - self.assertEqual([], res['tags']) - - def test_create_artifact_with_version(self): - values = {'name': 'name', 'version': '1.0'} - res = self.controller.create(self.req, 'sample_artifact', values) - self.assertEqual('name', res['name']) - self.assertEqual('1.0.0', res['version']) - - values = {'name': 'name', 'version': '1:0'} - res = self.controller.create(self.req, 'sample_artifact', values) - self.assertEqual('1.0.0-0', res['version']) - - values = {'name': 'name', 'version': '1:0:0'} - res = self.controller.create(self.req, 'sample_artifact', values) - self.assertEqual('1.0.0-0-0', res['version']) - - values = {'name': 'name', 'version': '2:0-0'} - res = self.controller.create(self.req, 'sample_artifact', values) - self.assertEqual('2.0.0-0-0', res['version']) - - def test_create_artifact_with_fields(self): - values = {'name': 'ttt', 'version': '1.0', - 'description': "Test Artifact", 'tags': ['a', 'a', 'b'], - 'metadata': {'type': 'image'}} - - res = self.controller.create(self.req, 'sample_artifact', values) - self.assertEqual('ttt', res['name']) - self.assertEqual('1.0.0', res['version']) - self.assertEqual(self.users['user1']['tenant_id'], res['owner']) - self.assertEqual('drafted', res['status']) - self.assertEqual('private', res['visibility']) - self.assertEqual('Test Artifact', res['description']) - self.assertEqual({'type': 'image'}, res['metadata']) - self.assertEqual({'a', 'b'}, set(res['tags'])) - - def test_create_no_artifact_type(self): - values = {'name': 'ttt'} - - self.assertRaises(exc.NotFound, self.controller.create, - self.req, 'wrong_type', values) - - def test_create_artifact_no_name(self): - values = {'version': '1.0'} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - def test_create_artifact_wrong_parameters(self): - values = {'name': 'test', 'version': 'invalid_format'} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'version': -1} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'version': ':'} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': '', 'version': '1.0'} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'a' * 256} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'description': 'a' * 4097} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'tags': ['a' * 256]} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) 
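The coercion rules exercised in test_create_artifact_with_version above ('1.0' becomes '1.0.0', '1:0' becomes '1.0.0-0', and so on) match the behavior of Version.coerce() from the python-semanticversion library, which pads partial versions to three components and folds leftover characters into a SemVer pre-release tag; judging by these results, glare's semver handling (glare/common/semver_db.py in the diffstat) builds on that library. A quick sketch, assuming python-semanticversion is installed:

    import semantic_version

    # Partial versions are padded with zeros to major.minor.patch...
    assert str(semantic_version.Version.coerce('1.0')) == '1.0.0'
    # ...and leftover invalid characters such as ':' are rewritten
    # into a SemVer pre-release suffix.
    assert str(semantic_version.Version.coerce('1:0')) == '1.0.0-0'
    assert str(semantic_version.Version.coerce('1:0:0')) == '1.0.0-0-0'
    assert str(semantic_version.Version.coerce('2:0-0')) == '2.0.0-0-0'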
- - values = {'name': 'test', 'tags': ['']} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'tags': ['a/a']} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'tags': ['a,a']} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'tags': [str(i) for i in range(256)]} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'metadata': {'key': 'a' * 256}} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'metadata': {'': 'a'}} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'metadata': {'a' * 256: 'a'}} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', - 'metadata': {('a' + str(i)): 'a' for i in range(256)}} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - def test_create_artifact_not_existing_field(self): - values = {'name': 'test', 'not_exist': 'some_value'} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', '': 'a'} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - def test_create_artifact_blob(self): - values = {'name': 'test', 'blob': 'DATA'} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - def test_create_artifact_system_fields(self): - values = {'name': 'test', - 'id': '5fdeba9a-ba12-4147-bb8a-a8daada84222'} - self.assertRaises(exc.Forbidden, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'created_at': '2000-01-01'} - self.assertRaises(exc.Forbidden, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'updated_at': '2000-01-01'} - self.assertRaises(exc.Forbidden, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'activated_at': '2000-01-01'} - self.assertRaises(exc.Forbidden, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'owner': 'new_owner'} - self.assertRaises(exc.Forbidden, self.controller.create, - self.req, 'sample_artifact', values) - - def test_create_artifact_status_and_visibility(self): - values = {'name': 'test', 'status': 'activated'} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - values = {'name': 'test', 'visibility': 'public'} - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'sample_artifact', values) - - def test_create_artifact_unicode(self): - name = u'\u0442\u0435\u0441\u0442' - description = u'\u041E\u043F\u0438\u0441\u0430\u043D\u0438\u0435' - tags = [u'\u041C\u0435\u0442\u043A\u0430'] - metadata = {'key': u'\u0417\u043D\u0430\u0447\u0435\u043D\u0438\u0435'} - values = { - 'name': name, - 'version': '1.0', - 'description': description, - 'tags': tags, - 'metadata': metadata - } - - res = self.controller.create(self.req, 'images', values) - self.assertEqual(name, res['name']) - self.assertEqual('1.0.0', res['version']) - 
self.assertEqual(self.users['user1']['tenant_id'], res['owner']) - self.assertEqual('drafted', res['status']) - self.assertEqual('private', res['visibility']) - self.assertEqual(description, res['description']) - self.assertEqual(metadata, res['metadata']) - self.assertEqual(tags, res['tags']) - - def test_create_artifact_4_byte_unicode(self): - bad_name = u'A name with forbidden symbol \U0001f62a' - values = { - 'name': bad_name, - 'version': '1.0', - } - - self.assertRaises(exc.BadRequest, self.controller.create, - self.req, 'images', values) diff --git a/glare/tests/unit/api/test_delete.py b/glare/tests/unit/api/test_delete.py deleted file mode 100644 index 978b23f..0000000 --- a/glare/tests/unit/api/test_delete.py +++ /dev/null @@ -1,199 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import mock -from six import BytesIO - -from glare.common import exception as exc -from glare.common import store_api -from glare.db import artifact_api -from glare.tests.unit import base - - -class TestArtifactDelete(base.BaseTestArtifactAPI): - - """Test Glare artifact deletion.""" - - def setUp(self): - super(TestArtifactDelete, self).setUp() - values = {'name': 'ttt', 'version': '1.0'} - self.artifact = self.controller.create( - self.req, 'sample_artifact', values) - # Upload data - self.controller.upload_blob( - self.req, 'sample_artifact', self.artifact['id'], 'blob', - BytesIO(b'a' * 100), 'application/octet-stream') - # Check that data was uploaded successfully - self.artifact = self.controller.show( - self.req, 'sample_artifact', self.artifact['id']) - self.assertEqual(100, self.artifact['blob']['size']) - self.assertEqual('active', self.artifact['blob']['status']) - - @mock.patch('glare.common.store_api.delete_blob', - side_effect=store_api.delete_blob) - def test_delete_with_data(self, mocked_delete): - # Delete artifact and check that 'delete_blob' was called - self.controller.delete(self.req, 'sample_artifact', - self.artifact['id']) - self.assertRaises(exc.NotFound, self.controller.show, - self.req, 'sample_artifact', self.artifact['id']) - self.assertEqual(1, mocked_delete.call_count) - - @mock.patch('glare.common.store_api.delete_blob', - side_effect=store_api.delete_blob) - def test_delete_with_blob_dict(self, mocked_delete): - # Upload data - for i in range(10): - self.controller.upload_blob( - self.req, 'sample_artifact', self.artifact['id'], - 'dict_of_blobs/blob%d' % i, - BytesIO(b'a' * 100), 'application/octet-stream') - # Check that data was uploaded successfully - self.artifact = self.controller.show( - self.req, 'sample_artifact', self.artifact['id']) - for i in range(10): - self.assertEqual( - 100, - self.artifact['dict_of_blobs']['blob%d' % i]['size']) - self.assertEqual( - 'active', - self.artifact['dict_of_blobs']['blob%d' % i]['status']) - # Delete artifact and check that 'delete_blob' was called for each blob: - # 10 times for blob dict elements and once for 'blob' - self.controller.delete(self.req, 'sample_artifact', -
self.artifact['id']) - self.assertRaises(exc.NotFound, self.controller.show, - self.req, 'sample_artifact', self.artifact['id']) - self.assertEqual(11, mocked_delete.call_count) - - def test_delete_not_found(self): - self.assertRaises(exc.NotFound, self.controller.delete, - self.req, 'sample_artifact', 'INVALID_ID') - - def test_delete_saving_blob(self): - blob = self.artifact['blob'] - # Change status of the blob to 'saving' - blob['status'] = 'saving' - artifact_api.ArtifactAPI().update_blob( - self.req.context, self.artifact['id'], {'blob': blob}) - self.artifact = self.controller.show( - self.req, 'sample_artifact', self.artifact['id']) - blob = self.artifact['blob'] - self.assertEqual(100, blob['size']) - self.assertEqual('saving', blob['status']) - # Deleting of the artifact leads to Conflict error - self.assertRaises(exc.Conflict, self.controller.delete, - self.req, 'sample_artifact', self.artifact['id']) - self.artifact = self.controller.show( - self.req, 'sample_artifact', self.artifact['id']) - self.assertEqual('drafted', self.artifact['status']) - - def test_delete_deleted_artifact(self): - # Change status of the artifact to 'deleted' - artifact_api.ArtifactAPI().save( - self.req.context, self.artifact['id'], {'status': 'deleted'}) - # Delete should work properly - self.controller.delete(self.req, 'sample_artifact', - self.artifact['id']) - self.assertRaises(exc.NotFound, self.controller.show, - self.req, 'sample_artifact', self.artifact['id']) - - @mock.patch('glare.common.store_api.delete_blob', - side_effect=exc.NotFound) - def test_delete_link_not_exist(self, mocked_delete): - # Delete artifact and check that 'delete_blob' was called - self.controller.delete(self.req, 'sample_artifact', - self.artifact['id']) - self.assertRaises(exc.NotFound, self.controller.show, - self.req, 'sample_artifact', self.artifact['id']) - self.assertEqual(1, mocked_delete.call_count) - - @mock.patch('glare.common.store_api.delete_blob', - side_effect=exc.Forbidden) - def test_no_delete_permission(self, mocked_delete): - # Try to delete artifact - self.assertRaises(exc.Forbidden, self.controller.delete, - self.req, 'sample_artifact', self.artifact['id']) - - @mock.patch('glare.common.store_api.delete_blob', - side_effect=exc.GlareException) - def test_delete_unknown_store_exception(self, mocked_delete): - # Try to delete artifact - self.assertRaises(exc.GlareException, self.controller.delete, - self.req, 'sample_artifact', self.artifact['id']) - - @mock.patch('glare.common.store_api.delete_blob', - side_effect=exc.NotFound) - def test_delete_blob_not_found(self, mocked_delete): - # Upload a file to blob dict - self.controller.upload_blob( - self.req, 'sample_artifact', self.artifact['id'], - 'dict_of_blobs/blob', - BytesIO(b'a' * 100), 'application/octet-stream') - - # Despite the exception artifact should be deleted successfully - self.controller.delete(self.req, 'sample_artifact', - self.artifact['id']) - self.assertRaises(exc.NotFound, self.controller.show, - self.req, 'sample_artifact', self.artifact['id']) - self.assertEqual(2, mocked_delete.call_count) - - @mock.patch('glare.common.store_api.delete_blob', - side_effect=store_api.delete_blob) - def test_delayed_delete_global(self, mocked_delete): - # Enable delayed delete - self.config(delayed_delete=True) - # Delete artifact and check that 'delete_blob' was not called - self.controller.delete(self.req, 'sample_artifact', - self.artifact['id']) - self.assertEqual(0, mocked_delete.call_count) - # Check that artifact status is 'deleted' and its 
blob is - # still 'active' until the scrubber deletes it - self.artifact = self.controller.show( - self.req, 'sample_artifact', self.artifact['id']) - self.assertEqual('deleted', self.artifact['status']) - self.assertEqual('active', self.artifact['blob']['status']) - # Disable delayed delete - self.config(delayed_delete=False) - # Delete artifact and check that 'delete_blob' was called this time - self.controller.delete(self.req, 'sample_artifact', - self.artifact['id']) - self.assertEqual(1, mocked_delete.call_count) - self.assertRaises(exc.NotFound, self.controller.show, - self.req, 'sample_artifact', self.artifact['id']) - - @mock.patch('glare.common.store_api.delete_blob', - side_effect=store_api.delete_blob) - def test_delayed_delete_per_artifact_type(self, mocked_delete): - # Enable delayed delete for sample_artifact type - # Global parameter is disabled - self.config(delayed_delete=True, group='sample_artifact') - # Delete artifact and check that 'delete_blob' was not called - self.controller.delete(self.req, 'sample_artifact', - self.artifact['id']) - self.assertEqual(0, mocked_delete.call_count) - # Check that artifact status is 'deleted' and its blob is - # still 'active' until the scrubber deletes it - self.artifact = self.controller.show( - self.req, 'sample_artifact', self.artifact['id']) - self.assertEqual('deleted', self.artifact['status']) - self.assertEqual('active', self.artifact['blob']['status']) - # Disable delayed delete - self.config(delayed_delete=False, group='sample_artifact') - # Delete artifact and check that 'delete_blob' was called this time - self.controller.delete(self.req, 'sample_artifact', - self.artifact['id']) - self.assertEqual(1, mocked_delete.call_count) - self.assertRaises(exc.NotFound, self.controller.show, - self.req, 'sample_artifact', self.artifact['id']) diff --git a/glare/tests/unit/api/test_download.py b/glare/tests/unit/api/test_download.py deleted file mode 100644 index 1286843..0000000 --- a/glare/tests/unit/api/test_download.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
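Both delayed-delete tests above lean on oslo.config's layered options: the same delayed_delete boolean exists at the global level and under a per-artifact-type group, and self.config(...) overrides one scope at a time. The pattern in isolation looks like the sketch below; the registration shown here is an assumption for illustration, since glare wires these options up in its own opts modules.

    from oslo_config import cfg

    CONF = cfg.CONF
    opt = cfg.BoolOpt('delayed_delete', default=False,
                      help='Defer actual blob deletion to the scrubber.')
    # The same option is registered globally and under a per-type group,
    # so deployments can enable delayed deletion for one type only.
    CONF.register_opt(opt)
    CONF.register_opt(opt, group='sample_artifact')

    CONF.set_override('delayed_delete', True, group='sample_artifact')
    assert CONF.sample_artifact.delayed_delete      # per-type override
    assert not CONF.delayed_delete                  # global default intact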
- -from six import BytesIO - -from glare.common import exception as exc -from glare.db import artifact_api -from glare.tests.unit import base - - -class TestArtifactDownload(base.BaseTestArtifactAPI): - def setUp(self): - super(TestArtifactDownload, self).setUp() - values = {'name': 'ttt', 'version': '1.0', 'string_required': 'str2'} - self.sample_artifact = self.controller.create( - self.req, 'sample_artifact', values) - - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], 'blob', - BytesIO(b'aaa'), 'application/octet-stream') - - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual(3, artifact['blob']['size']) - self.assertEqual('active', artifact['blob']['status']) - - def test_download_basic(self): - downloaded_blob = self.controller.download_blob( - self.req, 'sample_artifact', - self.sample_artifact['id'], 'blob') - self.assertEqual(b'aaa', downloaded_blob['data'].data) - - def test_download_from_folders(self): - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/folder1', - BytesIO(b'bbb'), 'application/octet-stream') - downloaded_blob = self.controller.download_blob( - self.req, 'sample_artifact', - self.sample_artifact['id'], 'dict_of_blobs/folder1') - self.assertEqual(b'bbb', downloaded_blob['data'].data) - - # Negative dict_of_blobs tests: - # Key error - self.assertRaises(exc.NotFound, self.controller.download_blob, - self.req, 'sample_artifact', - self.sample_artifact['id'], - "dict_of_blobs/ImaginaryFolder") - - # Incorrect dict_of_blobs spelling - self.assertRaises(exc.BadRequest, self.controller.download_blob, - self.req, 'sample_artifact', - self.sample_artifact['id'], - "NOT_DICT_FIELD/folder1") - - def test_download_from_non_existing_fields(self): - self.assertRaises(exc.BadRequest, self.controller.download_blob, - self.req, 'sample_artifact', - self.sample_artifact['id'], "NON_EXISTING_FIELD") - - def test_download_of_saving_blob(self): - self.sample_artifact = self.controller.show( - self.req, 'sample_artifact', self.sample_artifact['id']) - - # Change status of the blob to 'saving' - self.sample_artifact['blob']['status'] = 'saving' - artifact_api.ArtifactAPI().update_blob( - self.req.context, self.sample_artifact['id'], - {'blob': self.sample_artifact['blob']}) - - self.sample_artifact = self.controller.show( - self.req, 'sample_artifact', self.sample_artifact['id']) - self.assertEqual('saving', self.sample_artifact['blob']['status']) - - # Assert that we can't download while the blob is in 'saving' status - self.assertRaises(exc.Conflict, self.controller.download_blob, - self.req, 'sample_artifact', - self.sample_artifact['id'], "blob") - - def test_download_from_deactivated_artifact_as_other_user(self): - self.req = self.get_fake_request(user=self.users['admin']) - art = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - # Change artifact status to deactivated: drafted -> active -> deactivated - for status in ['active', 'deactivated']: - changes = [{'op': 'replace', 'path': '/status', 'value': status}] - self.req = self.get_fake_request(user=self.users['admin']) - art = self.update_with_values(changes, art_id=art['id']) - - # Make a request as another user (one that didn't create the artifact) - self.req = self.get_fake_request(user=self.users['user1']) - self.assertRaises(exc.Forbidden, self.controller.download_blob, - self.req, 'sample_artifact', - art['id'], "blob") - # Make sure that admin can download from a deactivated artifact
- self.req = self.get_fake_request(user=self.users['admin']) - downloaded_blob = self.controller.download_blob( - self.req, 'sample_artifact', art['id'], 'blob') - self.assertEqual(b'aaa', downloaded_blob['data'].data) - - def test_download_for_deleted_artifact(self): - self.config(delayed_delete=True) - self.controller.delete(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertRaises(exc.Forbidden, self.controller.download_blob, - self.req, 'sample_artifact', - self.sample_artifact['id'], "blob") - - def test_download_external_blob(self): - values = {'name': 'aaa', 'version': '2.0'} - url = "http://FAKE_LOCATION.COM" - content_type = 'application/vnd+openstack.glare-custom-location+json' - art = self.controller.create(self.req, 'sample_artifact', values) - body = {'url': url, 'md5': "fake"} - self.controller.upload_blob(self.req, 'sample_artifact', art['id'], - 'blob', body, content_type) - downloaded_blob = self.controller.download_blob(self.req, - 'sample_artifact', - art['id'], 'blob') - - self.assertEqual(url, downloaded_blob['data']['url']) - self.assertTrue(downloaded_blob['meta']['external']) - self.assertEqual("fake", downloaded_blob['meta']['md5']) - self.assertIsNone(downloaded_blob['meta']['sha1']) - self.assertIsNone(downloaded_blob['meta']['sha256']) diff --git a/glare/tests/unit/api/test_list.py b/glare/tests/unit/api/test_list.py deleted file mode 100644 index a5f087a..0000000 --- a/glare/tests/unit/api/test_list.py +++ /dev/null @@ -1,586 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
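test_download_external_blob above covers Glare's custom-location flow: instead of streaming bytes, the client submits a small JSON body carrying the external URL plus any known checksums under the dedicated glare-custom-location content type, and a later download returns that location metadata rather than data. Over the REST API the request might look roughly like the sketch below; the endpoint, port and auth header are assumptions for illustration, not taken from the patch.

    import requests

    # Assumed deployment details -- adjust for the environment under test.
    base_url = 'http://127.0.0.1:9494/artifacts/sample_artifact'
    headers = {
        'X-Auth-Token': 'ADMIN_TOKEN',
        'Content-Type':
            'application/vnd+openstack.glare-custom-location+json',
    }
    # Attach an external location to the 'blob' field: Glare stores only
    # the URL and supplied checksums, never the payload itself.
    body = {'url': 'https://example.com/data.img', 'md5': 'fake'}
    resp = requests.put('%s/%s/blob' % (base_url, 'ARTIFACT_ID'),
                        json=body, headers=headers)
    resp.raise_for_status()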
- -from glare.common import exception as exc - -from glare.tests import sample_artifact -from glare.tests.unit import base - -import random - - -class TestArtifactList(base.BaseTestArtifactAPI): - - def test_list_simple_fields(self): - # Create a bunch of artifacts for list testing - values = [ - {'name': 'art1', 'version': '0.0.1', 'string_required': 'str1', - 'int1': 5, 'float1': 5.0, 'bool1': 'yes'}, - {'name': 'art1', 'version': '1-beta', 'string_required': 'str2', - 'int1': 6, 'float1': 6.0, 'bool1': 'yes'}, - {'name': 'art1', 'version': '1', 'string_required': 'str1', - 'int1': 5, 'float1': 5.0, 'bool1': 'no', 'description': 'ggg'}, - {'name': 'art1', 'version': '2-rc1', 'string_required': 'str22', - 'int1': 7, 'float1': 7.0, 'bool1': 'yes'}, - {'name': 'art1', 'version': '10', 'string_required': 'str222', - 'int1': 5, 'float1': 5.0, 'bool1': 'yes'}, - {'name': 'art2', 'version': '1', 'string_required': 'str1', - 'int1': 8, 'float1': 8.0, 'bool1': 'no'}, - {'name': 'art3', 'version': '1', 'string_required': 'str1', - 'int1': -5, 'float1': -5.0, 'bool1': 'yes'}, - ] - arts = [self.controller.create(self.req, 'sample_artifact', val) - for val in values] - - # Activate artifacts at indexes 3 and 4 - changes = [{'op': 'replace', 'path': '/status', 'value': 'active'}] - arts[3] = self.update_with_values(changes, art_id=arts[3]['id']) - arts[4] = self.update_with_values(changes, art_id=arts[4]['id']) - - # Publish the artifact at index 4 - changes = [{'op': 'replace', 'path': '/visibility', 'value': 'public'}] - self.req = self.get_fake_request(user=self.users['admin']) - arts[4] = self.update_with_values(changes, art_id=arts[4]['id']) - self.req = self.get_fake_request(user=self.users['user1']) - - # Do basic tests - # Input format for filters is a list of tuples: - # (filter_name, filter_value) - - # List all artifacts - res = self.controller.list(self.req, 'sample_artifact') - self.assertEqual(7, len(res['artifacts'])) - self.assertEqual('sample_artifact', res['type_name']) - - # List all artifacts as an anonymous user.
Only public artifacts are visible - anon_req = self.get_fake_request(user=self.users['anonymous']) - res = self.controller.list(anon_req, 'sample_artifact') - self.assertEqual(1, len(res['artifacts'])) - self.assertIn(arts[4], res['artifacts']) - - # Filter by name - filters = [('name', 'art1')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(5, len(res['artifacts'])) - - filters = [('name', 'in:art2,art3')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(2, len(res['artifacts'])) - for i in (5, 6): - self.assertIn(arts[i], res['artifacts']) - - # Filter by string_required - filters = [('string_required', 'str1')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(4, len(res['artifacts'])) - for i in (0, 2, 5, 6): - self.assertIn(arts[i], res['artifacts']) - - # Filter by int1 - filters = [('int1', '5')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 2, 4): - self.assertIn(arts[i], res['artifacts']) - - filters = [('int1', 'in:5,6')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(4, len(res['artifacts'])) - for i in (0, 1, 2, 4): - self.assertIn(arts[i], res['artifacts']) - - # Filter by float1 - filters = [('float1', '5.0')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 2, 4): - self.assertIn(arts[i], res['artifacts']) - - # Filter by bool1 - filters = [('bool1', 'yes')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(5, len(res['artifacts'])) - for i in (0, 1, 3, 4, 6): - self.assertIn(arts[i], res['artifacts']) - - # Filter by id - filters = [('id', arts[0]['id'])] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(1, len(res['artifacts'])) - self.assertIn(arts[0], res['artifacts']) - - # Filter by status - filters = [('status', 'active')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(2, len(res['artifacts'])) - for i in (3, 4): - self.assertIn(arts[i], res['artifacts']) - - # Filter by visibility - filters = [('visibility', 'public')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(1, len(res['artifacts'])) - self.assertIn(arts[4], res['artifacts']) - - # Filter by owner - filters = [('owner', arts[0]['owner'])] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(7, len(res['artifacts'])) - for i in range(6): - self.assertIn(arts[i], res['artifacts']) - - # Filter by description leads to BadRequest - filters = [('description', 'ggg')] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Filter by created_at with eq operator leads to BadRequest - filters = [('created_at', arts[4]['created_at'])] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Filter by updated_at with eq operator leads to BadRequest - filters = [('updated_at', arts[4]['updated_at'])] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Filter by activated_at with eq operator leads to BadRequest - filters = [('activated_at', arts[4]['activated_at'])] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Filter by any blob leads to 
BadRequest - filters = [('blob', 'something')] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Filter by nonexistent field leads to BadRequest - filters = [('NONEXISTENT', 'something')] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - def test_list_marker_and_limit(self): - # Create artifacts - art_list = [ - self.controller.create( - self.req, 'sample_artifact', - {'name': 'name%s' % i, - 'version': '%d.0' % i, - 'tags': ['tag%s' % i], - 'int1': 1024 + i, - 'float1': 123.456, - 'str1': 'bugaga', - 'bool1': True}) - for i in range(5)] - - # sort with 'next_marker' - sort = [('int1', 'asc'), ('name', 'desc')] - result = self.controller.list(self.req, 'sample_artifact', filters=(), - limit=1, sort=sort) - self.assertEqual([art_list[0]], result['artifacts']) - marker = result['next_marker'] - result = self.controller.list(self.req, 'sample_artifact', filters=(), - marker=marker, limit=1, sort=sort) - self.assertEqual([art_list[1]], result['artifacts']) - - # sort by custom marker - sort = [('int1', 'asc')] - marker = art_list[1]['id'] - result = self.controller.list(self.req, 'sample_artifact', filters=(), - marker=marker, sort=sort) - self.assertEqual(art_list[2:], result['artifacts']) - - sort = [('int1', 'desc')] - result = self.controller.list(self.req, 'sample_artifact', filters=(), - marker=marker, sort=sort) - self.assertEqual(art_list[:1], result['artifacts']) - - sort = [('float1', 'asc'), ('name', 'desc')] - result = self.controller.list(self.req, 'sample_artifact', filters=(), - marker=marker, sort=sort) - self.assertEqual([art_list[0]], result['artifacts']) - - # paginate by name in desc order with limit 2 - sort = [('name', 'desc')] - result = self.controller.list(self.req, 'sample_artifact', filters=(), - limit=2, sort=sort) - self.assertEqual(art_list[4:2:-1], result['artifacts']) - - marker = result['next_marker'] - result = self.controller.list(self.req, 'sample_artifact', filters=(), - marker=marker, limit=2, sort=sort) - self.assertEqual(art_list[2:0:-1], result['artifacts']) - - marker = result['next_marker'] - result = self.controller.list(self.req, 'sample_artifact', filters=(), - marker=marker, limit=2, sort=sort) - self.assertEqual([art_list[0]], result['artifacts']) - - # paginate by version in desc order with limit 2 - sort = [('version', 'desc')] - result = self.controller.list(self.req, 'sample_artifact', filters=(), - limit=2, sort=sort) - self.assertEqual(art_list[4:2:-1], result['artifacts']) - - marker = result['next_marker'] - result = self.controller.list(self.req, 'sample_artifact', filters=(), - marker=marker, limit=2, sort=sort) - self.assertEqual(art_list[2:0:-1], result['artifacts']) - - marker = result['next_marker'] - result = self.controller.list(self.req, 'sample_artifact', filters=(), - marker=marker, limit=2, sort=sort) - self.assertEqual([art_list[0]], result['artifacts']) - - def test_list_version(self): - values = [ - {'name': 'art1', 'version': '0.0.1'}, - {'name': 'art1', 'version': '1-beta'}, - {'name': 'art1', 'version': '1'}, - {'name': 'art1', 'version': '10-rc1'}, - {'name': 'art1', 'version': '10'}, - {'name': 'art2', 'version': '1'}, - {'name': 'art3', 'version': '1'}, - ] - - arts = [self.controller.create(self.req, 'sample_artifact', val) - for val in values] - - # List all artifacts - res = self.controller.list(self.req, 'sample_artifact', []) - self.assertEqual(7, len(res['artifacts'])) - self.assertEqual('sample_artifact', 
res['type_name']) - - # Get latest artifacts - res = self.controller.list(self.req, 'sample_artifact', [], - latest=True) - self.assertEqual(3, len(res['artifacts'])) - for i in (4, 5, 6): - self.assertIn(arts[i], res['artifacts']) - - # Various version filters - filters = [('version', '1')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (2, 5, 6): - self.assertIn(arts[i], res['artifacts']) - - filters = [('version', '1'), ('name', 'art1')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(1, len(res['artifacts'])) - self.assertIn(arts[2], res['artifacts']) - - filters = [('version', 'gt:1')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(2, len(res['artifacts'])) - for i in (3, 4): - self.assertIn(arts[i], res['artifacts']) - - filters = [('version', 'gte:1')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(5, len(res['artifacts'])) - for i in (2, 3, 4, 5, 6): - self.assertIn(arts[i], res['artifacts']) - - filters = [('version', 'lte:1')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(5, len(res['artifacts'])) - for i in (0, 1, 2, 5, 6): - self.assertIn(arts[i], res['artifacts']) - - filters = [('version', 'gt:1-beta'), ('version', 'lt:10')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(4, len(res['artifacts'])) - for i in (2, 3, 5, 6): - self.assertIn(arts[i], res['artifacts']) - - filters = [('version', 'in:0.0.1,10-rc1')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(2, len(res['artifacts'])) - for i in (0, 3): - self.assertIn(arts[i], res['artifacts']) - - # Filter by invalid version - filters = [('version', 'INVALID_VERSION')] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Filter by invalid operator - filters = [('version', 'INVALID_op:1')] - self.
assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - def test_list_compound_fields(self): - # Create a bunch of artifacts for list testing - values = [ - {'name': 'art1', - 'dict_of_str': {'a': 'aa', 'b': 'bb'}, - 'dict_of_int': {'one': 1, 'two': 2}, - 'list_of_str': ['aa', 'bb'], - 'list_of_int': [1, 2]}, - {'name': 'art2', - 'dict_of_str': {'b': 'bb', 'c': 'cc'}, - 'dict_of_int': {'two': 2, 'three': 3}, - 'list_of_str': ['bb', 'cc'], - 'list_of_int': [2, 3]}, - {'name': 'art3', - 'dict_of_str': {'a': 'aa', 'c': 'cc'}, - 'dict_of_int': {'one': 1, 'three': 3}, - 'list_of_str': ['aa', 'cc'], - 'list_of_int': [1, 3]}, - {'name': 'art4', - 'dict_of_str': {'a': 'bb'}, - 'dict_of_int': {'one': 2}, - 'list_of_str': ['aa'], - 'list_of_int': [1]}, - {'name': 'art5', - 'dict_of_str': {'b': 'bb'}, - 'dict_of_int': {'two': 2}, - 'list_of_str': ['bb'], - 'list_of_int': [2]}, - {'name': 'art6', - 'dict_of_str': {}, - 'dict_of_int': {}, - 'list_of_str': [], - 'list_of_int': []}, - ] - arts = [self.controller.create(self.req, 'sample_artifact', val) - for val in values] - - # List all artifacts - res = self.controller.list(self.req, 'sample_artifact', []) - self.assertEqual(6, len(res['artifacts'])) - self.assertEqual('sample_artifact', res['type_name']) - - # Return artifacts that contain key 'a' in 'dict_of_str' - filters = [('dict_of_str', 'eq:a')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 2, 3): - self.assertIn(arts[i], res['artifacts']) - - # Return artifacts that contain key 'a' or 'c' in 'dict_of_str' - filters = [('dict_of_str', 'in:a,c')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(4, len(res['artifacts'])) - for i in (0, 1, 2, 3): - self.assertIn(arts[i], res['artifacts']) - - # Filter with invalid operator leads to BadRequest - filters = [('dict_of_str', 'invalid:a')] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Return artifacts that contain key one in 'dict_of_int' - filters = [('dict_of_int', 'eq:one')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 2, 3): - self.assertIn(arts[i], res['artifacts']) - - # Return artifacts that contain key one or three in 'dict_of_int' - filters = [('dict_of_int', 'in:one,three')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(4, len(res['artifacts'])) - for i in (0, 1, 2, 3): - self.assertIn(arts[i], res['artifacts']) - - # Filter by dicts values - # Return artifacts that contain value 'bb' in 'dict_of_str[b]' - filters = [('dict_of_str.b', 'eq:bb')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 1, 4): - self.assertIn(arts[i], res['artifacts']) - - # Return artifacts that contain values 'aa' or 'bb' in 'dict_of_str[a]' - filters = [('dict_of_str.a', 'in:aa,bb')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 2, 3): - self.assertIn(arts[i], res['artifacts']) - - # Filter with invalid operator leads to BadRequest - filters = [('dict_of_str.a', 'invalid:aa')] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Return artifacts that contain value '2' in 'dict_of_int[two]' - filters = [('dict_of_int.two', 'eq:2')] 
- res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 1, 4): - self.assertIn(arts[i], res['artifacts']) - - # Return artifacts that contain values '1' or '2' in 'dict_of_int[one]' - filters = [('dict_of_int.one', 'in:1,2')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 2, 3): - self.assertIn(arts[i], res['artifacts']) - - # Filter with invalid operator leads to BadRequest - filters = [('dict_of_int.one', 'invalid:1')] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Filter by nonexistent dict leads to BadRequest - filters = [('NOTEXIST.one', 'eq:1')] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Test with TypeError - filters = [('dict_of_int.1', 'lala')] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Return artifacts that contain key 'aa' in 'list_of_str' - filters = [('list_of_str', 'eq:aa')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 2, 3): - self.assertIn(arts[i], res['artifacts']) - - # Return artifacts that contain key 'aa' or 'cc' in 'list_of_str' - filters = [('list_of_str', 'in:aa,cc')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(4, len(res['artifacts'])) - for i in (0, 1, 2, 3): - self.assertIn(arts[i], res['artifacts']) - - # Filter with invalid operator leads to BadRequest - filters = [('list_of_str', 'invalid:aa')] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - # Return artifacts that contain key 1 in 'list_of_int' - filters = [('list_of_int', 'eq:1')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 2, 3): - self.assertIn(arts[i], res['artifacts']) - - # Return artifacts that contain key 1 or three in 'list_of_int' - filters = [('list_of_int', 'in:1,3')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(4, len(res['artifacts'])) - for i in (0, 1, 2, 3): - self.assertIn(arts[i], res['artifacts']) - - def test_filter_by_tags(self): - values = [ - {'name': 'name1', 'tags': ['tag1', 'tag2']}, - {'name': 'name2', 'tags': ['tag1', 'tag3']}, - {'name': 'name3', 'tags': ['tag1']}, - {'name': 'name4', 'tags': ['tag2']}, - {'name': 'name5', 'tags': ['tag4']}, - {'name': 'name6', 'tags': ['tag4', 'tag5']}, - ] - arts = [self.controller.create(self.req, 'sample_artifact', val) - for val in values] - - filters = [('tags', 'tag1')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 1, 2): - self.assertIn(arts[i], res['artifacts']) - - filters = [('tags', 'tag1,tag2')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(1, len(res['artifacts'])) - self.assertIn(arts[0], res['artifacts']) - - filters = [('tags', 'NOT_A_TAG')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(0, len(res['artifacts'])) - - filters = [('tags-any', 'tag1')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 1, 2): - self.assertIn(arts[i], res['artifacts']) - - filters = [('tags-any', 
'tag1,NOT_A_TAG')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 1, 2): - self.assertIn(arts[i], res['artifacts']) - - filters = [('tags-any', 'tag2,tag5')] - res = self.controller.list(self.req, 'sample_artifact', filters) - self.assertEqual(3, len(res['artifacts'])) - for i in (0, 3, 5): - self.assertIn(arts[i], res['artifacts']) - - # Filtering by tags with operators leads to BadRequest - for f in ('tags', 'tags-any'): - filters = [(f, 'eq:tag1')] - self.assertRaises( - exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', filters) - - def test_list_and_sort_fields(self): - amount = 7 - # Create a bunch of artifacts for list sorting tests - names = random.sample(["art%d" % i for i in range(amount)], amount) - floats = random.sample([0.01 * i for i in range(amount)], amount) - ints = random.sample([1 * i for i in range(amount)], amount) - strings = random.sample(["str%d" % i for i in range(amount)], amount) - versions = random.sample(["0.%d" % i for i in range(amount)], amount) - for i in range(amount): - val = {'name': names[i], 'float1': floats[i], 'int1': ints[i], - 'str1': strings[i], 'version': versions[i]} - self.controller.create(self.req, 'sample_artifact', val) - - fields = ['name', 'id', 'visibility', 'version', 'float1', 'int1', - 'str1'] - - for sort_name in fields: - for sort_dir in ['asc', 'desc']: - arts = self.controller.list( - self.req, 'sample_artifact', [], - sort=[(sort_name, sort_dir)])['artifacts'] - self.assertEqual(amount, len(arts)) - sorted_arts = sorted(arts, key=lambda x: x[sort_name], - reverse=sort_dir == 'desc') - self.assertEqual(sorted_arts, arts) - - def test_list_and_sort_negative(self): - # sort by non-existent field - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', - [], sort=[("NONEXISTENT", "desc")]) - - # sort by wrong direction - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', - [], sort=[("name", "WRONG_DIR")]) - - # For performance's sake, sorting by more than one custom field - # is forbidden. Nevertheless, sorting by several basic fields is - # absolutely fine. - # The list of basic fields is located in glare/db/sqlalchemy/api.py as - # the BASE_ARTIFACT_PROPERTIES tuple. - sort = [("int1", "desc"), ("float1", "desc")] - self.assertRaises(exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', - [], sort=sort) - - # sort with non-sortable fields - for name, field in sample_artifact.SampleArtifact.fields.items(): - for sort_dir in ['asc', 'desc']: - if not field.sortable: - self.assertRaises( - exc.BadRequest, self.controller.list, - self.req, 'sample_artifact', - [], sort=[(name, sort_dir)]) diff --git a/glare/tests/unit/api/test_locations.py b/glare/tests/unit/api/test_locations.py deleted file mode 100644 index bc2b1da..0000000 --- a/glare/tests/unit/api/test_locations.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and -# limitations under the License. - -import mock - -from glare.common import exception as exc -from glare.db import artifact_api -from glare.tests.unit import base - - -class TestLocations(base.BaseTestArtifactAPI): - """Test adding custom locations.""" - - def setUp(self): - super(TestLocations, self).setUp() - values = {'name': 'ttt', 'version': '1.0'} - self.sample_artifact = self.controller.create( - self.req, 'sample_artifact', values) - self.ct = 'application/vnd+openstack.glare-custom-location+json' - - def test_add_location(self): - with mock.patch('glance_store.backend.add_to_backend') as mocked_add: - body = {'url': 'https://FAKE_LOCATION.com', - 'md5': "fake", 'sha1': "fake_sha", "sha256": "fake_sha256"} - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'blob', body, self.ct) - art = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual('active', art['blob']['status']) - self.assertEqual('fake', art['blob']['md5']) - self.assertEqual('fake_sha', art['blob']['sha1']) - self.assertEqual('fake_sha256', art['blob']['sha256']) - self.assertIsNone(art['blob']['size']) - self.assertIsNone(art['blob']['content_type']) - self.assertEqual('https://FAKE_LOCATION.com', - art['blob']['url']) - self.assertNotIn('id', art['blob']) - self.assertEqual(0, mocked_add.call_count) - - # Adding location for the second time leads to Conflict error - body = {'url': 'https://ANOTHER_FAKE_LOCATION.com', - 'md5': "fake", 'sha1': "fake_sha", "sha256": "fake_sha256"} - self.assertRaises( - exc.Conflict, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'blob', body, self.ct) - - def test_add_dict_location(self): - with mock.patch('glance_store.backend.add_to_backend') as mocked_add: - body = {'url': 'https://FAKE_LOCATION.com', - 'md5': "fake", 'sha1': "fake_sha", "sha256": "fake_sha256"} - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/blob', body, self.ct) - art = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual('active', art['dict_of_blobs']['blob']['status']) - self.assertEqual('fake', art['dict_of_blobs']['blob']['md5']) - self.assertEqual('fake_sha', art['dict_of_blobs']['blob']['sha1']) - self.assertEqual('fake_sha256', - art['dict_of_blobs']['blob']['sha256']) - self.assertIsNone(art['dict_of_blobs']['blob']['size']) - self.assertIsNone(art['dict_of_blobs']['blob']['content_type']) - self.assertEqual('https://FAKE_LOCATION.com', - art['dict_of_blobs']['blob']['url']) - self.assertNotIn('id', art['blob']) - self.assertEqual(0, mocked_add.call_count) - - # Adding location for the second time leads to Conflict error - body = {'url': 'https://ANOTHER_FAKE_LOCATION.com', - 'md5': "fake", 'sha1': "fake_sha", "sha256": "fake_sha256"} - self.assertRaises( - exc.Conflict, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/blob', body, self.ct) - - def test_add_location_saving_blob(self): - body = {'url': 'https://FAKE_LOCATION.com', - 'md5': "fake", 'sha1': "fake_sha", "sha256": "fake_sha256"} - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'blob', body, self.ct) - art = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - - # Change status of the blob to 'saving' - 
art['blob']['status'] = 'saving' - artifact_api.ArtifactAPI().update_blob( - self.req.context, self.sample_artifact['id'], - {'blob': art['blob']}) - art = self.controller.show( - self.req, 'sample_artifact', self.sample_artifact['id']) - self.assertEqual('saving', art['blob']['status']) - - body = {'url': 'https://ANOTHER_FAKE_LOCATION.com', - 'md5': "fake", 'sha1': "fake_sha", "sha256": "fake_sha256"} - self.assertRaises( - exc.Conflict, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'blob', body, self.ct) - - def test_too_long_location_url(self): - body = {'url': 'http://FAKE_LOCATION%s.com' % ('a' * 2049), - 'md5': "fake", 'sha1': "fake_sha", "sha256": "fake_sha256"} - self.assertRaises( - exc.BadRequest, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'blob', body, self.ct) diff --git a/glare/tests/unit/api/test_update.py b/glare/tests/unit/api/test_update.py deleted file mode 100644 index e379eee..0000000 --- a/glare/tests/unit/api/test_update.py +++ /dev/null @@ -1,675 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from six import BytesIO -from uuid import uuid4 - -from glare.common import exception as exc -from glare.db import artifact_api -from glare.tests.unit import base - - -class TestArtifactUpdate(base.BaseTestArtifactAPI): - - """Test Glare artifact updates.""" - - def setUp(self): - super(TestArtifactUpdate, self).setUp() - values = {'name': 'ttt', 'version': '1.0'} - self.sample_artifact = self.controller.create( - self.req, 'sample_artifact', values) - - def test_basic_update(self): - changes = [ - {'op': 'replace', 'path': '/name', 'value': 'new_name'}, - {'op': 'replace', 'path': '/version', 'value': '1.0.0'}, - {'op': 'replace', 'path': '/description', 'value': 'Test'}, - {'op': 'replace', 'path': '/tags', 'value': ['tag1', 'tag2']}, - {'op': 'replace', 'path': '/metadata', 'value': {'k': 'v'}}, - ] - res = self.update_with_values(changes) - self.assertEqual('new_name', res['name']) - self.assertEqual('1.0.0', res['version']) - self.assertEqual('Test', res['description']) - self.assertEqual({'tag1', 'tag2'}, set(res['tags'])) - self.assertEqual({'k': 'v'}, res['metadata']) - - def test_update_replace_values(self): - changes = [ - {'op': 'replace', 'path': '/int1', 'value': 1}, - {'op': 'replace', 'path': '/float1', 'value': 1.0}, - {'op': 'replace', 'path': '/str1', 'value': 'Test'}, - {'op': 'replace', 'path': '/list_of_int', 'value': [0, 1]}, - {'op': 'replace', 'path': '/dict_of_str', 'value': {'k': 'v'}}, - ] - res = self.update_with_values(changes) - self.assertEqual(1, res['int1']) - self.assertEqual(1.0, res['float1']) - self.assertEqual('Test', res['str1']) - self.assertEqual([0, 1], res['list_of_int']) - self.assertEqual({'k': 'v'}, res['dict_of_str']) - - changes = [ - {'op': 'replace', 'path': '/int1', 'value': 2}, - {'op': 'replace', 'path': '/float1', 'value': 2.0}, - {'op': 'replace', 'path': '/str1', 'value': 'New_Test'}, - 
{'op': 'replace', 'path': '/list_of_int/1', 'value': 4}, - {'op': 'replace', 'path': '/dict_of_str/k', 'value': 'new_val'}, - ] - res = self.update_with_values(changes) - self.assertEqual(2, res['int1']) - self.assertEqual(2.0, res['float1']) - self.assertEqual('New_Test', res['str1']) - self.assertEqual([0, 4], res['list_of_int']) - self.assertEqual({'k': 'new_val'}, res['dict_of_str']) - - def test_update_no_artifact_type(self): - changes = [{'op': 'replace', 'path': '/name', 'value': 'new_name'}] - self.update_with_values( - changes, exc_class=exc.NotFound, art_type='wrong_type') - - def test_update_name_version(self): - # Create additional artifacts - values = {'name': 'ttt', 'version': '2.0'} - self.controller.create(self.req, 'sample_artifact', values) - values = {'name': 'ddd', 'version': '1.0'} - self.controller.create(self.req, 'sample_artifact', values) - - # This name/version is already taken - changes = [{'op': 'replace', 'path': '/version', - 'value': '2.0'}] - self.assertRaises(exc.Conflict, self.update_with_values, changes) - changes = [{'op': 'replace', 'path': '/name', - 'value': 'ddd'}] - self.assertRaises(exc.Conflict, self.update_with_values, changes) - - # Test coercing - # name - changes = [{'op': 'replace', 'path': '/name', 'value': True}] - res = self.update_with_values(changes) - self.assertEqual('True', res['name']) - - changes = [{'op': 'replace', 'path': '/name', 'value': 1.0}] - res = self.update_with_values(changes) - self.assertEqual('1.0', res['name']) - - changes = [{'op': 'replace', 'path': '/name', 'value': "tt:t"}] - res = self.update_with_values(changes) - self.assertEqual('tt:t', res['name']) - - # version - changes = [{'op': 'replace', 'path': '/version', 'value': 2.0}] - res = self.update_with_values(changes) - self.assertEqual('2.0.0', res['version']) - - changes = [{'op': 'replace', 'path': '/version', 'value': '1-alpha'}] - res = self.update_with_values(changes) - self.assertEqual('1.0.0-alpha', res['version']) - - changes = [{'op': 'replace', 'path': '/version', 'value': '1:0'}] - res = self.update_with_values(changes) - self.assertEqual('1.0.0-0', res['version']) - - def test_update_deleted_artifact(self): - # Enable delayed delete - self.config(delayed_delete=True) - # Delete artifact and check its status - self.controller.delete(self.req, 'sample_artifact', - self.sample_artifact['id']) - art = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual('deleted', art['status']) - - changes = [{'op': 'replace', 'path': '/int1', - 'value': 1}] - self.assertRaises(exc.Forbidden, self.update_with_values, changes) - - changes = [{'op': 'replace', 'path': '/name', - 'value': 'new'}] - self.assertRaises(exc.Forbidden, self.update_with_values, changes) - - def test_update_lists(self): - changes = [{'op': 'replace', 'path': '/list_of_str', - 'value': ['val1', 'val2']}] - res = self.update_with_values(changes) - self.assertEqual({'val1', 'val2'}, set(res['list_of_str'])) - - changes = [{'op': 'remove', 'path': '/list_of_str/0'}] - res = self.update_with_values(changes) - self.assertEqual(['val2'], res['list_of_str']) - - changes = [{'op': 'replace', 'path': '/list_of_str', 'value': None}] - res = self.update_with_values(changes) - self.assertEqual([], res['list_of_str']) - - changes = [{'op': 'add', 'path': '/list_of_str/-', 'value': 'val1'}] - res = self.update_with_values(changes) - self.assertEqual(['val1'], res['list_of_str']) - - changes = [{'op': 'replace', 'path': '/list_of_str/0', - 'value': 'val2'}] - res 
= self.update_with_values(changes) - self.assertEqual(['val2'], res['list_of_str']) - - changes = [{'op': 'replace', 'path': '/list_of_str', 'value': []}] - res = self.update_with_values(changes) - self.assertEqual([], res['list_of_str']) - - changes = [{'op': 'replace', 'path': '/list_of_str', 'value': {}}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/list_of_str', - 'value': {'a': 'b'}}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/list_of_str', - 'value': [['a']]}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'remove', 'path': '/list_of_str/-', - 'value': 'val3'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - def test_update_dicts(self): - changes = [{'op': 'replace', 'path': '/dict_of_str', - 'value': {'k1': 'v1', 'k2': 'v2'}}] - res = self.update_with_values(changes) - self.assertEqual({'k1': 'v1', 'k2': 'v2'}, res['dict_of_str']) - - changes = [{'op': 'remove', 'path': '/dict_of_str/k1'}] - res = self.update_with_values(changes) - self.assertEqual({'k2': 'v2'}, res['dict_of_str']) - - changes = [{'op': 'replace', 'path': '/dict_of_str', 'value': None}] - res = self.update_with_values(changes) - self.assertEqual({}, res['dict_of_str']) - - changes = [{'op': 'add', 'path': '/dict_of_str/k1', 'value': 'v1'}] - res = self.update_with_values(changes) - self.assertEqual({'k1': 'v1'}, res['dict_of_str']) - - changes = [{'op': 'replace', 'path': '/dict_of_str/k1', - 'value': 'v2'}] - res = self.update_with_values(changes) - self.assertEqual({'k1': 'v2'}, res['dict_of_str']) - - changes = [{'op': 'replace', 'path': '/dict_of_str', 'value': {}}] - res = self.update_with_values(changes) - self.assertEqual({}, res['dict_of_str']) - - changes = [{'op': 'replace', 'path': '/dict_of_str', 'value': []}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/dict_of_str', - 'value': ['a']}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/dict_of_str/k10', - 'value': {'k100': 'v100'}}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - def test_update_artifact_wrong_parameters(self): - changes = [{'op': 'replace', 'path': '/name', 'value': ''}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/name', 'value': 'a' * 256}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/version', 'value': ''}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/version', 'value': 'invalid'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/version', 'value': -1}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/description', - 'value': 'a' * 4097}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/tags', 'value': ['a' * 256]}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/tags', 'value': ['']}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/tags', 'value': ['a/a']}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/tags', 'value': ['a,a']}] - 
self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/tags', - 'value': [str(i) for i in range(256)]}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/metadata', - 'value': {'key': 'a' * 256}}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/metadata', - 'value': {'': 'a'}}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/metadata', - 'value': {'a' * 256: 'a'}}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/metadata', - 'value': {('a' + str(i)): 'a' for i in range(256)}}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/int1', 'value': 'aaa'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/float1', 'value': 'aaa'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - def test_update_artifact_not_existing_field(self): - changes = [{'op': 'replace', 'path': '/wrong_field', 'value': 'a'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/', 'value': 'a'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'add', 'path': '/wrong_field', 'value': 'a'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'add', 'path': '/', 'value': 'a'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - def test_update_artifact_remove_field(self): - changes = [{'op': 'remove', 'path': '/name'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'remove', 'path': '/list_of_int/10'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'remove', 'path': '/status'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [ - {'op': 'add', 'path': '/list_of_int/-', 'value': 4}, - {'op': 'add', 'path': '/dict_of_str/k', 'value': 'new_val'}, - ] - self.update_with_values(changes) - changes = [{'op': 'remove', 'path': '/list_of_int/0'}] - res = self.update_with_values(changes) - self.assertEqual([], res['list_of_int']) - changes = [{'op': 'remove', 'path': '/dict_of_str/k'}] - res = self.update_with_values(changes) - self.assertEqual({}, res['dict_of_str']) - - def test_update_artifact_blob(self): - changes = [{'op': 'replace', 'path': '/blob', 'value': 'a'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - def test_update_artifact_system_fields(self): - changes = [{'op': 'replace', 'path': '/id', - 'value': '5fdeba9a-ba12-4147-bb8a-a8daada84222'}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - changes = [{'op': 'replace', 'path': '/created_at', - 'value': '2000-01-01'}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - changes = [{'op': 'replace', 'path': '/updated_at', - 'value': '2000-01-01'}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - changes = [{'op': 'replace', 'path': '/activated_at', - 'value': '2000-01-01'}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - changes = [{'op': 'replace', 'path': '/owner', 'value': 'new_owner'}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - changes = [{'op': 'replace', 'path': '/system_attribute', - 'value': 'some_value'}] - self.update_with_values(changes, exc_class=exc.Forbidden) 
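All of the update tests above drive the controller with RFC 6902 JSON patch documents, built by the update_with_values helper defined in glare/tests/unit/base.py later in this patch. As a minimal sketch of the patch semantics being exercised, applied here to a plain dict rather than a real artifact, using the same jsonpatch library the helper itself relies on:

    import jsonpatch

    # A drafted artifact, reduced to a plain dict for illustration.
    artifact = {'name': 'ttt', 'version': '1.0', 'tags': [], 'metadata': {}}

    patch = jsonpatch.JsonPatch([
        {'op': 'replace', 'path': '/name', 'value': 'new_name'},
        {'op': 'add', 'path': '/tags/-', 'value': 'tag1'},   # append to a list
        {'op': 'add', 'path': '/metadata/k', 'value': 'v'},  # add a dict key
    ])

    updated = patch.apply(artifact)  # returns a patched copy by default
    assert updated['name'] == 'new_name'
    assert updated['tags'] == ['tag1']
    assert updated['metadata'] == {'k': 'v'}

The controller layers Glare-specific rules on top of plain patching (type coercion, mutability checks, system-field protection), which is why the same ops succeed or raise BadRequest/Forbidden depending on the target field, as the tests above assert.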
- - def test_update_artifact_visibility(self): - self.req = self.get_fake_request(user=self.users['admin']) - - changes = [{'op': 'replace', 'path': '/visibility', - 'value': 'wrong_value'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/visibility', - 'value': 'public'}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - changes = [{'op': 'replace', 'path': '/visibility', - 'value': None}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - changes = [{'op': 'replace', 'path': '/string_required', - 'value': 'some_string'}, - {'op': 'replace', 'path': '/status', - 'value': 'active'}] - res = self.update_with_values(changes) - self.assertEqual('active', res['status']) - self.assertEqual('some_string', res['string_required']) - - changes = [{'op': 'replace', 'path': '/visibility', 'value': 'public'}] - res = self.update_with_values(changes) - self.assertEqual('public', res['visibility']) - - changes = [{'op': 'replace', 'path': '/visibility', 'value': 'public'}] - res = self.update_with_values(changes) - self.assertEqual('public', res['visibility']) - - changes = [{'op': 'replace', 'path': '/visibility', - 'value': 'private'}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - def test_update_artifact_status(self): - self.req = self.get_fake_request(user=self.users['admin']) - - changes = [{'op': 'replace', 'path': '/status', - 'value': 'wrong_value'}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - # It's forbidden to activate artifact until required_on_activate field - # 'string_required' is set - changes = [{'op': 'replace', 'path': '/status', - 'value': 'active'}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - changes = [{'op': 'replace', 'path': '/status', - 'value': None}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - # It's forbidden to deactivate drafted artifact - changes = [{'op': 'replace', 'path': '/status', - 'value': 'deactivated'}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - changes = [{'op': 'replace', 'path': '/string_required', - 'value': 'some_string'}] - res = self.update_with_values(changes) - self.assertEqual('some_string', res['string_required']) - - # It's impossible to activate the artifact when it has 'saving' blobs - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], 'blob', - BytesIO(b'aaa'), 'application/octet-stream') - self.sample_artifact = self.controller.show( - self.req, 'sample_artifact', self.sample_artifact['id']) - - # Change status of the blob to 'saving' - self.sample_artifact['blob']['status'] = 'saving' - artifact_api.ArtifactAPI().update_blob( - self.req.context, self.sample_artifact['id'], - {'blob': self.sample_artifact['blob']}) - self.sample_artifact = self.controller.show( - self.req, 'sample_artifact', self.sample_artifact['id']) - self.assertEqual('saving', self.sample_artifact['blob']['status']) - - # Now activating of the artifact leads to Conflict - changes = [{'op': 'replace', 'path': '/status', 'value': 'active'}] - self.assertRaises(exc.Conflict, self.update_with_values, changes) - - # Reverting status of the blob to active again - self.sample_artifact['blob']['status'] = 'active' - artifact_api.ArtifactAPI().update_blob( - self.req.context, self.sample_artifact['id'], - {'blob': self.sample_artifact['blob']}) - self.sample_artifact = self.controller.show( - self.req, 'sample_artifact', self.sample_artifact['id']) - 
self.assertEqual('active', self.sample_artifact['blob']['status']) - - # It's possible to change artifact status with other fields in - # one request - changes = [ - {'op': 'replace', 'path': '/name', 'value': 'new_name'}, - {'op': 'replace', 'path': '/status', 'value': 'active'} - ] - self.sample_artifact = self.update_with_values(changes) - self.assertEqual('new_name', self.sample_artifact['name']) - self.assertEqual('active', self.sample_artifact['status']) - - changes = [{'op': 'replace', 'path': '/status', 'value': 'active'}] - res = self.update_with_values(changes) - self.assertEqual('active', res['status']) - - # It's possible to change artifact status with other fields in - # one request - changes = [ - {'op': 'replace', 'path': '/string_mutable', 'value': 'str'}, - {'op': 'replace', 'path': '/status', 'value': 'deactivated'} - ] - self.sample_artifact = self.update_with_values(changes) - self.assertEqual('str', self.sample_artifact['string_mutable']) - self.assertEqual('deactivated', self.sample_artifact['status']) - - changes = [{'op': 'replace', 'path': '/status', - 'value': 'deactivated'}] - res = self.update_with_values(changes) - self.assertEqual('deactivated', res['status']) - - # It's possible to change artifact status with other fields in - # one request - changes = [ - {'op': 'replace', 'path': '/status', 'value': 'active'}, - {'op': 'replace', 'path': '/description', 'value': 'test'}, - ] - self.sample_artifact = self.update_with_values(changes) - self.assertEqual('test', self.sample_artifact['description']) - self.assertEqual('active', self.sample_artifact['status']) - - changes = [{'op': 'replace', 'path': '/status', 'value': 'active'}] - res = self.update_with_values(changes) - self.assertEqual('active', res['status']) - - changes = [{'op': 'replace', 'path': '/status', - 'value': None}] - self.update_with_values(changes, exc_class=exc.BadRequest) - - # Enable delayed delete - self.config(delayed_delete=True) - # Delete artifact and check its status - self.controller.delete(self.req, 'sample_artifact', - self.sample_artifact['id']) - art = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual('deleted', art['status']) - - changes = [{'op': 'replace', 'path': '/status', 'value': 'active'}] - self.assertRaises(exc.Forbidden, - self.update_with_values, changes) - - def test_update_artifact_mutable_fields(self): - changes = [{'op': 'replace', 'path': '/string_required', - 'value': 'some_string'}] - res = self.update_with_values(changes) - self.assertEqual('some_string', res['string_required']) - - changes = [{'op': 'replace', 'path': '/status', 'value': 'active'}] - res = self.update_with_values(changes) - self.assertEqual('active', res['status']) - - changes = [{'op': 'replace', 'path': '/name', 'value': 'new_name'}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - changes = [{'op': 'replace', 'path': '/metadata', 'value': {'k': 'v'}}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - changes = [{'op': 'add', 'path': '/metadata/k', 'value': 'v'}] - self.update_with_values(changes, exc_class=exc.Forbidden) - - changes = [{'op': 'replace', 'path': '/tags', 'value': ['a']}] - res = self.update_with_values(changes) - self.assertEqual(['a'], res['tags']) - - changes = [{'op': 'add', 'path': '/tags/-', 'value': 'b'}] - res = self.update_with_values(changes) - self.assertEqual({'a', 'b'}, set(res['tags'])) - - changes = [{'op': 'replace', 'path': '/description', 'value': 'Test'}] - res = 
self.update_with_values(changes) - self.assertEqual('Test', res['description']) - - changes = [{'op': 'replace', 'path': '/string_mutable', - 'value': 'some_value'}] - res = self.update_with_values(changes) - self.assertEqual('some_value', res['string_mutable']) - - def test_update_artifact_unicode(self): - name = u'\u0442\u0435\u0441\u0442' - description = u'\u041E\u043F\u0438\u0441\u0430\u043D\u0438\u0435' - tags = [u'\u041C\u0435\u0442\u043A\u0430'] - metadata = {'key': u'\u0417\u043D\u0430\u0447\u0435\u043D\u0438\u0435'} - changes = [ - {'op': 'replace', 'path': '/name', 'value': name}, - {'op': 'replace', 'path': '/version', 'value': '1.0.0'}, - {'op': 'replace', 'path': '/description', 'value': description}, - {'op': 'replace', 'path': '/tags', 'value': tags}, - {'op': 'replace', 'path': '/metadata', 'value': metadata}, - ] - res = self.update_with_values(changes) - - self.assertEqual(name, res['name']) - self.assertEqual('1.0.0', res['version']) - self.assertEqual(self.users['user1']['tenant_id'], res['owner']) - self.assertEqual('drafted', res['status']) - self.assertEqual('private', res['visibility']) - self.assertEqual(description, res['description']) - self.assertEqual(metadata, res['metadata']) - self.assertEqual(tags, res['tags']) - - def test_update_artifact_4_byte_unicode(self): - bad_name = u'A name with forbidden symbol \U0001f62a' - changes = [ - {'op': 'replace', 'path': '/name', 'value': bad_name} - ] - - self.assertRaises(exc.BadRequest, self.update_with_values, changes) - - -class TestLinks(base.BaseTestArtifactAPI): - - """Test Glare artifact link management.""" - - def setUp(self): - super(TestLinks, self).setUp() - values = {'name': 'ttt', 'version': '1.0'} - self.sample_artifact = self.controller.create( - self.req, 'sample_artifact', values) - values = {'name': 'sss', 'version': '1.0'} - self.dependency = self.controller.create( - self.req, 'sample_artifact', values) - - def test_manage_links(self): - dep_url = "/artifacts/sample_artifact/%s" % self.dependency['id'] - - # set valid link - patch = [{"op": "replace", "path": "/link1", "value": dep_url}] - res = self.update_with_values(patch) - self.assertEqual(res['link1'], dep_url) - - # remove link from artifact - patch = [{"op": "replace", "path": "/link1", "value": None}] - res = self.update_with_values(patch) - self.assertIsNone(res['link1']) - - # set invalid external link - dep_url = "http://example.com/artifacts/" \ - "sample_artifact/%s" % self.dependency['id'] - patch = [{"op": "replace", "path": "/link1", "value": dep_url}] - self.assertRaises(exc.BadRequest, self.update_with_values, patch) - - # try to set invalid link - patch = [{"op": "replace", "path": "/link1", "value": "Invalid"}] - self.assertRaises(exc.BadRequest, self.update_with_values, patch) - - # try to set link to non-existing artifact - non_exiting_url = "/artifacts/sample_artifact/%s" % uuid4() - patch = [{"op": "replace", - "path": "/link1", - "value": non_exiting_url}] - self.assertRaises(exc.BadRequest, self.update_with_values, patch) - - def test_manage_dict_of_links(self): - dep_url = "/artifacts/sample_artifact/%s" % self.dependency['id'] - - # set valid link - patch = [{"op": "add", - "path": "/dict_of_links/link1", - "value": dep_url}] - res = self.update_with_values(patch) - self.assertEqual(res['dict_of_links']['link1'], dep_url) - - # remove link from artifact - patch = [{"op": "remove", - "path": "/dict_of_links/link1"}] - res = self.update_with_values(patch) - self.assertNotIn('link1', res['dict_of_links']) - - # set invalid 
external link - dep_url = "http://example.com/artifacts/" \ - "sample_artifact/%s" % self.dependency['id'] - patch = [{"op": "replace", - "path": "/dict_of_links/link1", "value": dep_url}] - self.assertRaises(exc.BadRequest, self.update_with_values, patch) - - # try to set invalid link - patch = [{"op": "replace", - "path": "/dict_of_links/link1", - "value": "Invalid"}] - self.assertRaises(exc.BadRequest, self.update_with_values, patch) - - # try to set link to non-existing artifact - non_exiting_url = "/artifacts/sample_artifact/%s" % uuid4() - patch = [{"op": "replace", - "path": "/dict_of_links/link1", - "value": non_exiting_url}] - self.assertRaises(exc.BadRequest, self.update_with_values, patch) - - def test_manage_list_of_links(self): - dep_url = "/artifacts/sample_artifact/%s" % self.dependency['id'] - - # set valid link - patch = [{"op": "add", - "path": "/list_of_links/-", - "value": dep_url}] - res = self.update_with_values(patch) - self.assertEqual(res['list_of_links'][0], dep_url) - - # remove link from artifact - patch = [{"op": "remove", - "path": "/list_of_links/0"}] - res = self.update_with_values(patch) - self.assertEqual(0, len(res['list_of_links'])) - - # set invalid external link - dep_url = "http://example.com/artifacts/" \ - "sample_artifact/%s" % self.dependency['id'] - patch = [{"op": "replace", - "path": "/list_of_links/-", "value": dep_url}] - self.assertRaises(exc.BadRequest, self.update_with_values, patch) - - # try to set invalid link - patch = [{"op": "add", - "path": "/list_of_links/-", - "value": "Invalid"}] - self.assertRaises(exc.BadRequest, self.update_with_values, patch) - - # try to set link to non-existing artifact - non_exiting_url = "/artifacts/sample_artifact/%s" % uuid4() - patch = [{"op": "add", - "path": "/list_of_links/-", - "value": non_exiting_url}] - self.assertRaises(exc.BadRequest, self.update_with_values, patch) diff --git a/glare/tests/unit/api/test_upload.py b/glare/tests/unit/api/test_upload.py deleted file mode 100644 index 236fa55..0000000 --- a/glare/tests/unit/api/test_upload.py +++ /dev/null @@ -1,281 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
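The TestLinks cases above pin down the rules for link fields: only internal URLs of the form /artifacts/&lt;type&gt;/&lt;uuid&gt; are accepted, the referenced artifact must exist, and external http(s) locations are rejected. A rough sketch of such a check; the regex and the validate_link helper are illustrative assumptions, not Glare's actual implementation:

    import re
    import uuid

    _LINK_RE = re.compile(
        r'^/artifacts/(?P<type>[0-9a-z_]+)/(?P<id>[0-9a-f-]{36})$')

    def validate_link(url, existing_ids):
        # Hypothetical helper distilled from the test expectations above.
        match = _LINK_RE.match(url)
        if match is None:
            raise ValueError('BadRequest: not an internal artifact URL')
        uuid.UUID(match.group('id'))  # raises ValueError on a malformed id
        if match.group('id') not in existing_ids:
            raise ValueError('BadRequest: link target does not exist')
        return url

In the real API the same violations surface as exc.BadRequest, as asserted in all three link tests.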
- -from glance_store import exceptions as store_exc -import mock -from six import BytesIO - -from glare.common import exception as exc -from glare.db import artifact_api -from glare.tests import sample_artifact -from glare.tests.unit import base - - -class TestArtifactUpload(base.BaseTestArtifactAPI): - """Test blob uploading.""" - - def setUp(self): - super(TestArtifactUpload, self).setUp() - values = {'name': 'ttt', 'version': '1.0'} - self.sample_artifact = self.controller.create( - self.req, 'sample_artifact', values) - - def test_upload_basic(self): - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], 'blob', - BytesIO(b'aaa'), 'application/octet-stream') - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual(3, artifact['blob']['size']) - self.assertEqual('active', artifact['blob']['status']) - - def test_blob_size_too_big(self): - # small blob size is limited by 10 bytes - self.assertRaises( - exc.RequestEntityTooLarge, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'small_blob', BytesIO(b'a' * 11), 'application/octet-stream') - - def test_already_uploaded(self): - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], 'blob', - BytesIO(b'aaa'), 'application/octet-stream') - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual(3, artifact['blob']['size']) - self.assertEqual('active', artifact['blob']['status']) - - # Re-uploading blob leads to Conflict error - self.assertRaises( - exc.Conflict, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], 'blob', - BytesIO(b'aaa'), 'application/octet-stream') - - def test_upload_saving_blob(self): - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], 'blob', - BytesIO(b'aaa'), 'application/octet-stream') - self.sample_artifact = self.controller.show( - self.req, 'sample_artifact', self.sample_artifact['id']) - - # Change status of the blob to 'saving' - self.sample_artifact['blob']['status'] = 'saving' - artifact_api.ArtifactAPI().update_blob( - self.req.context, self.sample_artifact['id'], - {'blob': self.sample_artifact['blob']}) - self.sample_artifact = self.controller.show( - self.req, 'sample_artifact', self.sample_artifact['id']) - self.assertEqual('saving', self.sample_artifact['blob']['status']) - - # Uploading new blob leads to Conflict error - self.assertRaises( - exc.Conflict, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], 'blob', - BytesIO(b'aaa'), 'application/octet-stream') - - def test_storage_error(self): - self.config(enabled_artifact_types=['sample_artifact']) - with mock.patch('glance_store.backend.add_to_backend', - side_effect=store_exc.GlanceStoreException): - self.assertRaises( - exc.GlareException, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'blob', BytesIO(b'aaa'), 'application/octet-stream') - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertIsNone(artifact['blob']) - - def test_upload_blob_dict(self): - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/blb1', - BytesIO(b'aaa'), 'application/octet-stream') - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual(3, 
artifact['dict_of_blobs']['blb1']['size']) - self.assertEqual('active', artifact['dict_of_blobs']['blb1']['status']) - - # upload another one - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/blb2', - BytesIO(b'aaa'), 'application/octet-stream') - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual(3, artifact['dict_of_blobs']['blb2']['size']) - self.assertEqual('active', artifact['dict_of_blobs']['blb2']['status']) - - def test_upload_oversized_blob_dict(self): - # external location shouldn't affect folder size - ct = 'application/vnd+openstack.glare-custom-location+json' - body = {'url': 'https://FAKE_LOCATION.com', - 'md5': "fake", 'sha1': "fake_sha", "sha256": "fake_sha256"} - artifact = self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/external', body, ct) - self.assertIsNone(artifact['dict_of_blobs']['external']['size']) - self.assertEqual('active', - artifact['dict_of_blobs']['external']['status']) - - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/a', - BytesIO(1800 * b'a'), 'application/octet-stream') - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual(1800, artifact['dict_of_blobs']['a']['size']) - self.assertEqual('active', artifact['dict_of_blobs']['a']['status']) - - # upload another one - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/b', - BytesIO(199 * b'b'), 'application/octet-stream') - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual(199, artifact['dict_of_blobs']['b']['size']) - self.assertEqual('active', artifact['dict_of_blobs']['b']['status']) - - # upload to bring the total size to exactly 2000 bytes - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/c', - BytesIO(b'c'), 'application/octet-stream') - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual(1, artifact['dict_of_blobs']['c']['size']) - self.assertEqual('active', artifact['dict_of_blobs']['c']['status']) - - # Uploading any more would exceed the max folder limit of 2000 bytes - self.assertRaises( - exc.RequestEntityTooLarge, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/d', BytesIO(b'd'), 'application/octet-stream') - - def test_existing_blob_dict_key(self): - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/blb', BytesIO(b'aaa'), 'application/octet-stream') - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual(3, artifact['dict_of_blobs']['blb']['size']) - self.assertEqual('active', artifact['dict_of_blobs']['blb']['status']) - - # If the blob key already exists, Glare returns a Conflict error - self.assertRaises( - exc.Conflict, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/blb', BytesIO(b'aaa'), 'application/octet-stream') - - def test_blob_dict_storage_error(self): - self.config(enabled_artifact_types=['sample_artifact']) - with mock.patch('glance_store.backend.add_to_backend', - side_effect=store_exc.GlanceStoreException): - self.assertRaises( - exc.GlareException,
self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/blb', BytesIO(b'aaa'), - 'application/octet-stream') - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertNotIn('blb', artifact['dict_of_blobs']) - - @mock.patch('os.remove') - def test_upload_with_hook(self, mocked_os_remove): - with mock.patch.object( - sample_artifact.SampleArtifact, 'validate_upload', - return_value=(BytesIO(b'aaa'), 'temporary_path')): - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], - 'blob', BytesIO(b'aaa'), 'application/octet-stream') - artifact = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual(3, artifact['blob']['size']) - self.assertEqual('active', artifact['blob']['status']) - # If a temporary file has been created, it must be removed - mocked_os_remove.assert_called_once_with('temporary_path') - - @mock.patch('os.remove') - def test_upload_with_hook_error(self, mocked_os_remove): - with mock.patch.object( - sample_artifact.SampleArtifact, 'validate_upload', - side_effect=Exception): - self.assertRaises( - exc.BadRequest, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/blb', BytesIO(b'aaa'), - 'application/octet-stream') - art = self.controller.show(self.req, 'sample_artifact', - self.sample_artifact['id']) - self.assertEqual({}, art['dict_of_blobs']) - self.assertEqual(0, mocked_os_remove.call_count) - - def test_upload_nonexistent_field(self): - self.assertRaises( - exc.BadRequest, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], 'INVALID', - BytesIO(b'aaa'), 'application/octet-stream') - - self.assertRaises( - exc.BadRequest, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'blob/key', BytesIO(b'aaa'), 'application/octet-stream') - - def test_upload_non_blob_field(self): - self.assertRaises( - exc.BadRequest, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], 'int1', - BytesIO(b'aaa'), 'application/octet-stream') - - def test_upload_blob_dict_without_key(self): - self.assertRaises( - exc.BadRequest, self.controller.upload_blob, - self.req, 'sample_artifact', self.sample_artifact['id'], - 'dict_of_blobs/', BytesIO(b'aaa'), 'application/octet-stream') - - def test_parallel_uploading_and_activation(self): - """ - This test checks whether it is possible to activate an artifact - while it has uploading blobs.
- """ - self.controller.upload_blob( - self.req, 'sample_artifact', self.sample_artifact['id'], 'blob', - BytesIO(b'aaa'), 'application/octet-stream') - self.sample_artifact = self.controller.show( - self.req, 'sample_artifact', self.sample_artifact['id']) - changes = [{'op': 'replace', - 'path': '/string_required', - 'value': 'ttt'}] - self.update_with_values(changes) - - # Change status of the blob to 'saving' - self.sample_artifact['blob']['status'] = 'saving' - artifact_api.ArtifactAPI().update_blob( - self.req.context, self.sample_artifact['id'], - {'blob': self.sample_artifact['blob']}) - self.sample_artifact = self.controller.show( - self.req, 'sample_artifact', self.sample_artifact['id']) - self.assertEqual('saving', self.sample_artifact['blob']['status']) - - # activation of artifact with saving blobs lead to Conflict error - changes = [{'op': 'replace', 'path': '/status', 'value': 'active'}] - self.assertRaises(exc.Conflict, self.update_with_values, changes) - - # create another artifact which doesn't have uploading blobs - values = {'name': 'ttt', 'version': '2.0', 'string_required': 'rrr'} - new_artifact = self.controller.create( - self.req, 'sample_artifact', values) - # activation is possible - res = self.update_with_values(changes, art_id=new_artifact['id']) - self.assertEqual('active', res['status']) diff --git a/glare/tests/unit/base.py b/glare/tests/unit/base.py deleted file mode 100644 index 8ccd238..0000000 --- a/glare/tests/unit/base.py +++ /dev/null @@ -1,167 +0,0 @@ -# Copyright 2012 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import fixtures -import glance_store as store -from glance_store import location -import jsonpatch -from oslo_config import cfg -from oslo_config import fixture as cfg_fixture -from oslo_policy import policy as os_policy -from oslo_utils import uuidutils -import testtools - -from glare.api.middleware import context -from glare.api.v1 import resource -from glare.common import policy -from glare.common import wsgi -from glare.db.sqlalchemy import api as db_api - -CONF = cfg.CONF - - -class BaseTestCase(testtools.TestCase): - - def setUp(self): - super(BaseTestCase, self).setUp() - self._config_fixture = self.useFixture(cfg_fixture.Config()) - - self.users = { - 'user1': { - 'id': uuidutils.generate_uuid(), - 'tenant_id': uuidutils.generate_uuid(), - 'token': uuidutils.generate_uuid(), - 'roles': ['member'] - }, - 'user2': { - 'id': uuidutils.generate_uuid(), - 'tenant_id': uuidutils.generate_uuid(), - 'token': uuidutils.generate_uuid(), - 'roles': ['member'] - }, - 'admin': { - 'id': uuidutils.generate_uuid(), - 'tenant_id': uuidutils.generate_uuid(), - 'token': uuidutils.generate_uuid(), - 'roles': ['admin'] - }, - 'anonymous': { - 'id': None, - 'tenant_id': None, - 'token': None, - 'roles': [] - } - } - - self.test_dir = self.useFixture(fixtures.TempDir()).path - - CONF.set_default('connection', 'sqlite://', group='database') - db_api.setup_db() - - enf = policy.init(use_conf=False) - for default in enf.registered_rules.values(): - if default.name not in enf.rules: - enf.rules[default.name] = default.check - - self.config( - custom_artifact_types_modules=[ - 'glare.tests.sample_artifact', - 'glare.tests.hooks_artifact' - ], - enabled_artifact_types=[ - 'hooks_artifact', 'sample_artifact:database', 'images', - 'heat_templates', 'heat_environments', 'murano_packages', - 'tosca_templates'] - ) - - location.SCHEME_TO_CLS_MAP = {} - self._create_stores() - self.addCleanup(setattr, location, 'SCHEME_TO_CLS_MAP', dict()) - - self.addCleanup(db_api.drop_db) - self.addCleanup(policy.reset) - - def config(self, **kw): - """Override some configuration values. - - The keyword arguments are the names of configuration options to - override and their values. - - If a group argument is supplied, the overrides are applied to - the specified configuration option group. - - All overrides are automatically cleared at the end of the current - test by the fixtures cleanup process. - """ - self._config_fixture.config(**kw) - - @staticmethod - def policy(**new_rules): - enf = policy.init(use_conf=False) - for rule_name, rule_check_str in new_rules.items(): - enf.rules[rule_name] = os_policy.RuleDefault( - rule_name, rule_check_str).check - - @staticmethod - def get_fake_request(user): - req = wsgi.Request.blank('') - req.method = 'POST' - kwargs = { - 'user': user['id'], - 'tenant': user['tenant_id'], - 'roles': user['roles'], - 'is_admin': 'admin' in user['roles'], - } - req.context = context.RequestContext(**kwargs) - return req - - def _create_stores(self): - """Create known stores. Mock out sheepdog's subprocess dependency - on collie. - - :returns: the number of store drivers loaded.
- """ - store.register_opts(CONF) - - self.config(default_store='filesystem', - filesystem_store_datadir=self.test_dir, - group="glance_store") - - store.create_stores(CONF) - - @staticmethod - def generate_json_patch(values): - patch = jsonpatch.JsonPatch(values) - tuple(map(patch._get_operation, patch.patch)) - return patch - - def update_with_values(self, values, exc_class=None, - art_type='sample_artifact', art_id=None): - patch = self.generate_json_patch(values) - art_id = art_id or self.sample_artifact['id'] - if exc_class is None: - return self.controller.update(self.req, art_type, art_id, patch) - else: - self.assertRaises(exc_class, self.controller.update, self.req, - art_type, art_id, patch) - - -class BaseTestArtifactAPI(BaseTestCase): - - def setUp(self): - super(BaseTestArtifactAPI, self).setUp() - self.controller = resource.ArtifactsController() - self.req = self.get_fake_request(user=self.users['user1']) diff --git a/glare/tests/unit/db/__init__.py b/glare/tests/unit/db/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare/tests/unit/db/migrations/__init__.py b/glare/tests/unit/db/migrations/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare/tests/unit/db/migrations/test_migrations.py b/glare/tests/unit/db/migrations/test_migrations.py deleted file mode 100644 index b5e9b37..0000000 --- a/glare/tests/unit/db/migrations/test_migrations.py +++ /dev/null @@ -1,258 +0,0 @@ -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Tests for database migrations. There are "opportunistic" tests for both mysql -and postgresql in here, which allows testing against these databases in a -properly configured unit test environment. -For the opportunistic testing you need to set up a db named 'openstack_citest' -with user 'openstack_citest' and password 'openstack_citest' on localhost. -The test will then use that db and u/p combo to run the tests. 
-For postgres on Ubuntu this can be done with the following commands: -:: - sudo -u postgres psql - postgres=# create user openstack_citest with createdb login password - 'openstack_citest'; - postgres=# create database openstack_citest with owner openstack_citest; -""" - -import contextlib - -from alembic import script -import mock -from oslo_db.sqlalchemy import utils as db_utils -from oslo_db.tests.sqlalchemy import base as test_base -from oslo_log import log as logging -import sqlalchemy -import sqlalchemy.exc - -from glare.db.migration import migration -import glare.db.sqlalchemy.api -from glare.tests.unit import glare_fixtures - -LOG = logging.getLogger(__name__) - - -@contextlib.contextmanager -def patch_with_engine(engine): - with mock.patch.object(glare.db.sqlalchemy.api, - 'get_engine') as patch_engine: - patch_engine.return_value = engine - yield - - -class WalkVersionsMixin(object): - def _walk_versions(self, engine=None, alembic_cfg=None): - # Determine latest version script from the repo, then - # upgrade from 1 through to the latest, with no data - # in the databases. This just checks that the schema itself - # upgrades successfully. - - # Place the database under version control - with patch_with_engine(engine): - - script_directory = script.ScriptDirectory.from_config(alembic_cfg) - - self.assertIsNone(self.migration_api.version(engine)) - - versions = [ver for ver in script_directory.walk_revisions()] - - for version in reversed(versions): - with glare_fixtures.BannedDBSchemaOperations(): - self._migrate_up(engine, alembic_cfg, - version.revision, with_data=True) - - for version in versions: - with glare_fixtures.BannedDBSchemaOperations(): - self._migrate_down(engine, alembic_cfg, - version.down_revision, with_data=True) - - def _migrate_up(self, engine, config, version, with_data=False): - """migrate up to a new version of the db. - - We allow for data insertion and post checks at every - migration version with special _pre_upgrade_### and - _check_### functions in the main test. 
- """ - try: - if with_data: - data = None - pre_upgrade = getattr( - self, "_pre_upgrade_%s" % version, None) - if pre_upgrade: - data = pre_upgrade(engine) - - self.migration_api.upgrade(version, config=config) - self.assertEqual(version, self.migration_api.version(engine)) - if with_data: - check = getattr(self, "_check_%s" % version, None) - if check: - check(engine, data) - except Exception: - LOG.error("Failed to migrate to version %(version)s on engine " - "%(engine)s", {'version': version, 'engine': engine}) - raise - - def _migrate_down(self, engine, config, version, with_data=False): - try: - self.migration_api.downgrade(version, config=config) - if with_data: - post_downgrade = getattr( - self, "_post_downgrade_%s" % version, None) - if post_downgrade: - post_downgrade(engine) - except Exception: - LOG.error("Failed to migrate to version %(version)s on engine " - "%(engine)s", {'version': version, 'engine': engine}) - raise - - -class GlareMigrationsCheckers(object): - - def setUp(self): - super(GlareMigrationsCheckers, self).setUp() - self.config = migration.get_alembic_config() - self.migration_api = migration - - def assert_table(self, engine, table_name, indices, columns): - table = db_utils.get_table(engine, table_name) - index_data = [(index.name, index.columns.keys()) for index in - table.indexes] - column_data = [column.name for column in table.columns] - self.assertItemsEqual(columns, column_data) - self.assertItemsEqual(indices, index_data) - - def test_walk_versions(self): - self._walk_versions(self.engine, self.config) - - def _pre_upgrade_001(self, engine): - self.assertRaises(sqlalchemy.exc.NoSuchTableError, - db_utils.get_table, engine, - 'glare_artifacts') - self.assertRaises(sqlalchemy.exc.NoSuchTableError, - db_utils.get_table, engine, - 'glare_artifact_tags') - self.assertRaises(sqlalchemy.exc.NoSuchTableError, - db_utils.get_table, engine, - 'glare_artifact_properties') - self.assertRaises(sqlalchemy.exc.NoSuchTableError, - db_utils.get_table, engine, - 'glare_artifact_blobs') - self.assertRaises(sqlalchemy.exc.NoSuchTableError, - db_utils.get_table, engine, - 'glare_artifact_locks') - - def _check_001(self, engine, data): - artifacts_indices = [('ix_glare_artifact_name_and_version', - ['name', 'version_prefix', 'version_suffix']), - ('ix_glare_artifact_type', - ['type_name']), - ('ix_glare_artifact_status', ['status']), - ('ix_glare_artifact_visibility', ['visibility']), - ('ix_glare_artifact_owner', ['owner'])] - artifacts_columns = ['id', - 'name', - 'type_name', - 'version_prefix', - 'version_suffix', - 'version_meta', - 'description', - 'visibility', - 'status', - 'owner', - 'created_at', - 'updated_at', - 'activated_at'] - self.assert_table(engine, 'glare_artifacts', artifacts_indices, - artifacts_columns) - - tags_indices = [('ix_glare_artifact_tags_artifact_id', - ['artifact_id']), - ('ix_glare_artifact_tags_artifact_id_tag_value', - ['artifact_id', - 'value'])] - tags_columns = ['id', - 'artifact_id', - 'value'] - self.assert_table(engine, 'glare_artifact_tags', tags_indices, - tags_columns) - - prop_indices = [ - ('ix_glare_artifact_properties_artifact_id', - ['artifact_id']), - ('ix_glare_artifact_properties_name', ['name'])] - prop_columns = ['id', - 'artifact_id', - 'name', - 'string_value', - 'int_value', - 'numeric_value', - 'bool_value', - 'key_name', - 'position'] - self.assert_table(engine, 'glare_artifact_properties', prop_indices, - prop_columns) - - blobs_indices = [ - ('ix_glare_artifact_blobs_artifact_id', ['artifact_id']), - 
('ix_glare_artifact_blobs_name', ['name'])] - blobs_columns = ['id', - 'artifact_id', - 'size', - 'md5', - 'sha1', - 'sha256', - 'name', - 'key_name', - 'external', - 'status', - 'content_type', - 'url'] - self.assert_table(engine, 'glare_artifact_blobs', blobs_indices, - blobs_columns) - - locks_indices = [] - locks_columns = ['id'] - self.assert_table(engine, 'glare_artifact_locks', locks_indices, - locks_columns) - - def _check_002(self, engine, data): - locks_indices = [] - locks_columns = ['id', 'acquired_at'] - self.assert_table(engine, 'glare_artifact_locks', locks_indices, - locks_columns) - - def _check_003(self, engine, data): - locks_indices = [] - locks_columns = ['id', 'data'] - self.assert_table(engine, 'glare_blob_data', locks_indices, - locks_columns) - - -class TestMigrationsMySQL(GlareMigrationsCheckers, - WalkVersionsMixin, - test_base.MySQLOpportunisticTestCase): - pass - - -class TestMigrationsPostgreSQL(GlareMigrationsCheckers, - WalkVersionsMixin, - test_base.PostgreSQLOpportunisticTestCase): - pass - - -class TestMigrationsSqlite(GlareMigrationsCheckers, - WalkVersionsMixin, - test_base.DbTestCase,): - pass diff --git a/glare/tests/unit/glare_fixtures.py b/glare/tests/unit/glare_fixtures.py deleted file mode 100644 index 926a520..0000000 --- a/glare/tests/unit/glare_fixtures.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import fixtures - -from glare.common import exception - - -class BannedDBSchemaOperations(fixtures.Fixture): - """Ban some operations for migrations""" - def __init__(self, banned_resources=None): - super(BannedDBSchemaOperations, self).__init__() - self._banned_resources = banned_resources or [] - - @staticmethod - def _explode(resource, op): - raise exception.DBNotAllowed( - 'Operation %s.%s() is not allowed in a database migration' % ( - resource, op)) - - def setUp(self): - super(BannedDBSchemaOperations, self).setUp() - for thing in self._banned_resources: - self.useFixture(fixtures.MonkeyPatch( - 'sqlalchemy.%s.drop' % thing, - lambda *a, **k: self._explode(thing, 'drop'))) - self.useFixture(fixtures.MonkeyPatch( - 'sqlalchemy.%s.alter' % thing, - lambda *a, **k: self._explode(thing, 'alter'))) diff --git a/glare/tests/unit/middleware/__init__.py b/glare/tests/unit/middleware/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare/tests/unit/middleware/test_context.py b/glare/tests/unit/middleware/test_context.py deleted file mode 100644 index a93d4ab..0000000 --- a/glare/tests/unit/middleware/test_context.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import webob - -from glare.api.middleware import context -from glare.common import exception as exc -from glare.tests.unit import base - - -class TestContextMiddleware(base.BaseTestCase): - def _build_request(self, roles=None, identity_status='Confirmed', - service_catalog=None): - req = webob.Request.blank('/') - req.headers['x-auth-token'] = 'token1' - req.headers['x-identity-status'] = identity_status - req.headers['x-user-id'] = 'user1' - req.headers['x-tenant-id'] = 'tenant1' - _roles = roles or ['role1', 'role2'] - req.headers['x-roles'] = ','.join(_roles) - if service_catalog: - req.headers['x-service-catalog'] = service_catalog - - return req - - def _build_middleware(self): - return context.ContextMiddleware(None) - - def test_header_parsing(self): - req = self._build_request() - self._build_middleware().process_request(req) - self.assertEqual('token1', req.context.auth_token) - self.assertEqual('user1', req.context.user) - self.assertEqual('tenant1', req.context.tenant) - self.assertEqual(['role1', 'role2'], req.context.roles) - - def test_is_admin_flag(self): - # is_admin check should look for 'admin' role by default - req = self._build_request(roles=['admin', 'role2']) - self._build_middleware().process_request(req) - self.assertTrue(req.context.is_admin) - - # without the 'admin' role, is_admin should be False - req = self._build_request() - self._build_middleware().process_request(req) - self.assertFalse(req.context.is_admin) - - # if we change the admin_role attribute, we should be able to use it - req = self._build_request() - self.policy(context_is_admin='role:role1') - self._build_middleware().process_request(req) - self.assertTrue(req.context.is_admin) - - def test_roles_case_insensitive(self): - # accept role from request - req = self._build_request(roles=['Admin', 'role2']) - self._build_middleware().process_request(req) - self.assertTrue(req.context.is_admin) - - # accept role from config - req = self._build_request(roles=['role1']) - self.policy(context_is_admin='role:rOLe1') - self._build_middleware().process_request(req) - self.assertTrue(req.context.is_admin) - - def test_roles_stripping(self): - # stripping extra spaces in request - req = self._build_request(roles=['\trole1']) - self.policy(context_is_admin='role:role1') - self._build_middleware().process_request(req) - self.assertTrue(req.context.is_admin) - - def test_anonymous_access_enabled(self): - req = self._build_request(identity_status='Nope') - self.config(allow_anonymous_access=True) - middleware = self._build_middleware() - middleware.process_request(req) - self.assertIsNone(req.context.auth_token) - self.assertIsNone(req.context.user) - self.assertIsNone(req.context.tenant) - self.assertEqual([], req.context.roles) - self.assertFalse(req.context.is_admin) - self.assertTrue(req.context.read_only) - - def test_anonymous_access_defaults_to_disabled(self): - req = self._build_request(identity_status='Nope') - middleware = self._build_middleware() - self.assertRaises(exc.Unauthorized, - middleware.process_request, req) - - def test_service_catalog(self): - catalog_json = "[{}]" - req = 
self._build_request(service_catalog=catalog_json) - self._build_middleware().process_request(req) - self.assertEqual([{}], req.context.service_catalog) - - def test_invalid_service_catalog(self): - catalog_json = "bad json" - req = self._build_request(service_catalog=catalog_json) - middleware = self._build_middleware() - self.assertRaises(exc.GlareException, - middleware.process_request, req) - - def test_response(self): - req = self._build_request() - req.context = context.RequestContext() - request_id = req.context.request_id - - resp = webob.Response() - resp.request = req - self._build_middleware().process_response(resp) - self.assertEqual(request_id, resp.headers['x-openstack-request-id']) - resp_req_id = resp.headers['x-openstack-request-id'] - # Validate that the request id does not start with 'req-req-' - if isinstance(resp_req_id, bytes): - resp_req_id = resp_req_id.decode('utf-8') - self.assertFalse(resp_req_id.startswith('req-req-')) - self.assertTrue(resp_req_id.startswith('req-')) diff --git a/glare/tests/unit/middleware/test_fault.py b/glare/tests/unit/middleware/test_fault.py deleted file mode 100644 index b98f959..0000000 --- a/glare/tests/unit/middleware/test_fault.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -from functools import partial -import inspect - -import mock -from oslo_config import cfg -from oslo_log import log as logging - -from glare.api.middleware import fault -from glare.common import exception as exc -from glare.tests.unit import base - -CONF = cfg.CONF -logging.register_options(CONF) - - -class TestFaultMiddleware(base.BaseTestCase): - - @staticmethod - def get_response(value=None, exception=Exception): - if value is None: - raise exception - return value - - def _build_middleware(self): - return fault.GlareFaultWrapperFilter(None) - - def test_no_exception(self): - req = mock.Mock() - req.get_response.return_value = 'Response object' - with mock.patch.object(fault.Fault, '__init__') as mocked_fault: - res = self._build_middleware()(req) - self.assertEqual('Response object', res) - self.assertEqual(0, mocked_fault.call_count) - - def test_exceptions(self): - req = mock.Mock() - error_map = fault.GlareFaultWrapperFilter.error_map - - # Raise all exceptions from error_map - for name, obj in inspect.getmembers(exc, inspect.isclass): - if not issubclass(obj, Exception)\ - or obj is exc.InvalidGlobalAPIVersion: - continue - req.get_response.side_effect = partial(self.get_response, - exception=obj) - res = self._build_middleware()(req) - - while name not in error_map: - obj = obj.__base__ - name = obj.__name__ - self.assertEqual(error_map[name].code, res.error['code']) - - # Raise other possible exceptions that lead to 500 error - for e in (Exception, ValueError, TypeError, exc.GlareException): - req.get_response.side_effect = partial( - self.get_response, exception=e) - res = self._build_middleware()(req) - self.assertEqual(500, res.error['code']) - - # InvalidGlobalAPIVersion should also include min_version and - # max_version headers - req.get_response.side_effect = partial( - self.get_response, exception=exc.InvalidGlobalAPIVersion( - req_ver=100.0, min_ver=1.0, max_ver=1.1)) - res = self._build_middleware()(req) - self.assertEqual(406, res.error['code']) - self.assertEqual(1.0, res.error['min_version']) - self.assertEqual(1.1, res.error['max_version']) - - def test_trace_marker(self): - req = mock.Mock() - self.config(debug=True) - traceback_marker = 'Traceback (most recent call last)' - pref = "PREFIX" - suff = "SUFFIX" - - # Test with marker - req.get_response.side_effect = partial( - self.get_response, exception=ValueError( - pref + traceback_marker + suff)) - res = self._build_middleware()(req) - self.assertEqual(500, res.error['code']) - self.assertEqual(pref, res.error['error']['message']) - self.assertEqual(traceback_marker + suff, - res.error['error']['traceback']) - - # Test without marker - req.get_response.side_effect = partial( - self.get_response, exception=ValueError( - pref + suff)) - res = self._build_middleware()(req) - self.assertEqual(500, res.error['code']) - self.assertEqual(pref + suff, res.error['error']['message']) - self.assertIn(traceback_marker, res.error['error']['traceback']) - - def test_fault_class(self): - req = mock.Mock() - req.get_response.side_effect = partial( - self.get_response, exception=exc.BadRequest) - res = self._build_middleware()(req)(req) - self.assertEqual(400, res.status_code) - self.assertEqual('400 Bad Request', res.status) diff --git a/glare/tests/unit/middleware/test_keycloak_auth.py b/glare/tests/unit/middleware/test_keycloak_auth.py deleted file mode 100644 index a4e7806..0000000 --- a/glare/tests/unit/middleware/test_keycloak_auth.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under 
the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import mock -import requests -import webob - -from glare.api.middleware import keycloak_auth -from glare.common import exception as exc -from glare.tests.unit import base - - -class TestKeycloakAuthMiddleware(base.BaseTestCase): - def _build_request(self, token): - req = webob.Request.blank("/") - req.headers["x-auth-token"] = token - req.get_response = lambda app: None - return req - - def _build_middleware(self): - return keycloak_auth.KeycloakAuthMiddleware(None) - - @mock.patch("requests.get") - def test_header_parsing(self, mocked_get): - token = { - "iss": "http://localhost:8080/auth/realms/my_realm", - "realm_access": { - "roles": ["role1", "role2"] - } - } - mocked_resp = mock.Mock() - mocked_resp.status_code = 200 - mocked_resp.json.return_value = '{"user": "mike"}' - mocked_get.return_value = mocked_resp - - req = self._build_request(token) - with mock.patch("jwt.decode", return_value=token): - self._build_middleware()(req) - self.assertEqual("Confirmed", req.headers["X-Identity-Status"]) - self.assertEqual("my_realm", req.headers["X-Project-Id"]) - self.assertEqual("role1,role2", req.headers["X-Roles"]) - self.assertEqual(1, mocked_get.call_count) - - def test_no_auth_token(self): - req = webob.Request.blank("/") - self.assertRaises(exc.Unauthorized, self._build_middleware(), req) - - @mock.patch("requests.get") - def test_no_realm_access(self, mocked_get): - token = { - "iss": "http://localhost:8080/auth/realms/my_realm", - } - mocked_resp = mock.Mock() - mocked_resp.status_code = 200 - mocked_resp.json.return_value = '{"user": "mike"}' - mocked_get.return_value = mocked_resp - - req = self._build_request(token) - with mock.patch("jwt.decode", return_value=token): - self._build_middleware()(req) - self.assertEqual("Confirmed", req.headers["X-Identity-Status"]) - self.assertEqual("my_realm", req.headers["X-Project-Id"]) - self.assertEqual("", req.headers["X-Roles"]) - - def test_wrong_token_format(self): - req = self._build_request(token="WRONG_FORMAT_TOKEN") - self.assertRaises(exc.Unauthorized, self._build_middleware(), req) - - @mock.patch("requests.get") - def test_server_unauthorized(self, mocked_get): - token = { - "iss": "http://localhost:8080/auth/realms/my_realm", - } - mocked_resp = mock.Mock() - mocked_resp.status_code = 401 - mocked_resp.json.return_value = '{"user": "mike"}' - mocked_get.return_value = mocked_resp - - req = self._build_request(token) - with mock.patch("jwt.decode", return_value=token): - self.assertRaises(exc.Unauthorized, self._build_middleware(), req) - - @mock.patch("requests.get") - def test_server_forbidden(self, mocked_get): - token = { - "iss": "http://localhost:8080/auth/realms/my_realm", - } - mocked_resp = mock.Mock() - mocked_resp.status_code = 403 - mocked_resp.json.return_value = '{"user": "mike"}' - mocked_get.return_value = mocked_resp - - req = self._build_request(token) - with mock.patch("jwt.decode", return_value=token): - self.assertRaises(exc.Forbidden, self._build_middleware(), req) - - 
@mock.patch("requests.get") - def test_server_exception(self, mocked_get): - token = { - "iss": "http://localhost:8080/auth/realms/my_realm", - } - mocked_resp = mock.Mock() - mocked_resp.status_code = 500 - mocked_resp.json.return_value = '{"user": "mike"}' - mocked_get.return_value = mocked_resp - - req = self._build_request(token) - with mock.patch("jwt.decode", return_value=token): - self.assertRaises( - exc.GlareException, self._build_middleware(), req) - - @mock.patch("requests.get") - def test_connection_error(self, mocked_get): - token = { - "iss": "http://localhost:8080/auth/realms/my_realm", - } - mocked_get.side_effect = requests.ConnectionError - - req = self._build_request(token) - with mock.patch("jwt.decode", return_value=token): - self.assertRaises( - exc.GlareException, self._build_middleware(), req) - - @mock.patch("requests.get") - def test_userinfo_endpoint_empty(self, mocked_get): - self.config(user_info_endpoint_url='', - group='keycloak_oidc') - token = { - "iss": "http://localhost:8080/auth/realms/my_realm", - "realm_access": { - "roles": ["role1", "role2"] - } - } - - req = self._build_request(token) - with mock.patch("jwt.decode", return_value=token): - self._build_middleware()(req) - self.assertEqual("Confirmed", req.headers["X-Identity-Status"]) - self.assertEqual("my_realm", req.headers["X-Project-Id"]) - self.assertEqual("role1,role2", req.headers["X-Roles"]) - self.assertEqual(0, mocked_get.call_count) diff --git a/glare/tests/unit/middleware/test_trusted_auth.py b/glare/tests/unit/middleware/test_trusted_auth.py deleted file mode 100644 index cb8b29b..0000000 --- a/glare/tests/unit/middleware/test_trusted_auth.py +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import webob - -from glare.api.middleware import context -from glare.common import exception as exc -from glare.tests.unit import base - - -class TestTrustedAuthMiddleware(base.BaseTestCase): - def _build_request(self, token): - req = webob.Request.blank("/") - req.headers["x-auth-token"] = token - req.get_response = lambda app: None - return req - - def _build_middleware(self): - return context.TrustedAuthMiddleware(None) - - def test_header_parsing(self): - token = 'user1:tenant1:role1,role2' - req = self._build_request(token) - self._build_middleware().process_request(req) - - self.assertEqual("Confirmed", req.headers["X-Identity-Status"]) - self.assertEqual("user1", req.headers["X-User-Id"]) - self.assertEqual("tenant1", req.headers["X-Tenant-Id"]) - self.assertEqual("role1,role2", req.headers["X-Roles"]) - - self.assertEqual(token, req.context.auth_token) - self.assertEqual('user1', req.context.user) - self.assertEqual('tenant1', req.context.tenant) - self.assertEqual(['role1', 'role2'], req.context.roles) - self.assertIn('service_catalog', req.context.to_dict()) - - def test_no_auth_token(self): - req = self._build_request(None) - del req.headers['x-auth-token'] - self.assertRaises(exc.Unauthorized, - self._build_middleware().process_request, req) - - def test_wrong_format(self): - req = self._build_request('WRONG_FORMAT') - middleware = self._build_middleware() - self.assertRaises(exc.Unauthorized, - middleware.process_request, req) - - req = self._build_request('user1:tenant1:role1:role2') - self.assertRaises(exc.Unauthorized, - middleware.process_request, req) - - def test_no_tenant(self): - req = self._build_request('user1::role') - middleware = self._build_middleware() - self.assertRaises(exc.Unauthorized, - middleware.process_request, req) - - def test_no_roles(self): - # stripping extra spaces in request - req = self._build_request('user1:tenant1:') - self._build_middleware().process_request(req) - self.assertFalse(req.context.is_admin) - self.assertEqual('user1', req.context.user) - self.assertEqual("user1", req.headers["X-User-Id"]) - self.assertEqual("", req.headers["X-Roles"]) - self.assertEqual([], req.context.roles) - - def test_is_admin_flag(self): - # is_admin check should look for 'admin' role by default - req = self._build_request('user1:tenant1:role1,admin') - self._build_middleware().process_request(req) - self.assertTrue(req.context.is_admin) - - # without the 'admin' role, is_admin should be False - req = self._build_request('user1:tenant1:role1,role2') - self._build_middleware().process_request(req) - self.assertFalse(req.context.is_admin) - - # if we change the admin_role attribute, we should be able to use it - req = self._build_request('user1:tenant1:role1,role2') - self.policy(context_is_admin='role:role1') - self._build_middleware().process_request(req) - self.assertTrue(req.context.is_admin) - - def test_roles_case_insensitive(self): - # accept role from request - req = self._build_request('user1:tenant1:Admin,role2') - self._build_middleware().process_request(req) - self.assertTrue(req.context.is_admin) - - # accept role from config - req = self._build_request('user1:tenant1:role1,role2') - self.policy(context_is_admin='role:rOLe1') - self._build_middleware().process_request(req) - self.assertTrue(req.context.is_admin) - - def test_token_stripping(self): - # stripping extra spaces in request - req = self._build_request(' user1:tenant1:role1\t') - self.policy(context_is_admin='role:role1') - self._build_middleware().process_request(req) - 
self.assertTrue(req.context.is_admin) - self.assertEqual('user1', req.context.user) - self.assertEqual("user1", req.headers["X-User-Id"]) - self.assertEqual("role1", req.headers["X-Roles"]) - - def test_anonymous_access_enabled(self): - req = self._build_request('user1:none:role1,role2') - self.config(allow_anonymous_access=True) - middleware = self._build_middleware() - middleware.process_request(req) - self.assertIsNone(req.context.auth_token) - self.assertIsNone(req.context.user) - self.assertIsNone(req.context.tenant) - self.assertEqual([], req.context.roles) - self.assertFalse(req.context.is_admin) - self.assertTrue(req.context.read_only) - - def test_anonymous_access_defaults_to_disabled(self): - req = self._build_request('user1:none:role1,role2') - middleware = self._build_middleware() - self.assertRaises(exc.Unauthorized, - middleware.process_request, req) - - def test_response(self): - req = self._build_request('user1:tenant1:role1,role2') - req.context = context.RequestContext() - request_id = req.context.request_id - - resp = webob.Response() - resp.request = req - self._build_middleware().process_response(resp) - self.assertEqual(request_id, resp.headers['x-openstack-request-id']) - resp_req_id = resp.headers['x-openstack-request-id'] - # Validate that the request id does not start with 'req-req-' - if isinstance(resp_req_id, bytes): - resp_req_id = resp_req_id.decode('utf-8') - self.assertFalse(resp_req_id.startswith('req-req-')) - self.assertTrue(resp_req_id.startswith('req-')) - - def test_response_no_request_id(self): - req = self._build_request('user1:tenant1:role1,role2') - req.context = context.RequestContext() - del req.context.request_id - - resp = webob.Response() - resp.request = req - self._build_middleware().process_response(resp) - self.assertNotIn('x-openstack-request-id', resp.headers) - - def test_response_no_request_id_prefix(self): - # prefix is 'req-' - req = self._build_request('user1:tenant1:role1,role2') - req.context = context.RequestContext() - req.context.request_id = "STRING_WITH_NO_PREFIX" - - resp = webob.Response() - resp.request = req - self._build_middleware().process_response(resp) - self.assertEqual('req-STRING_WITH_NO_PREFIX', - resp.headers['x-openstack-request-id']) diff --git a/glare/tests/unit/middleware/test_version_negotiations.py b/glare/tests/unit/middleware/test_version_negotiations.py deleted file mode 100644 index 7ecd590..0000000 --- a/glare/tests/unit/middleware/test_version_negotiations.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -import webob - -from glare.api.middleware import version_negotiation -from glare.common import exception as exc -from glare.tests.unit import base - - -class TestContextMiddleware(base.BaseTestCase): - MIME_TYPE = 'application/vnd.openstack.artifacts-' - - def _build_request(self, accept, path_info): - req = webob.Request.blank(path_info) - req.accept = accept - return req - - def _build_middleware(self): - return version_negotiation.GlareVersionNegotiationFilter(None) - - def test_version_request(self): - for path_info in ('/', '/versions'): - expected = { - "versions": [{ - "status": "EXPERIMENTAL", - "min_version": "1.0", - "version": "1.0", - "id": "v1.0", - "links": [{"href": "http://localhost/", "rel": "self"}] - }] - } - req = self._build_request(self.MIME_TYPE + '1.0', path_info) - res = self._build_middleware().process_request(req) - self.assertEqual(expected, res.json_body) - - def test_wrong_version(self): - req = self._build_request(self.MIME_TYPE + 'INVALID', '/artifacts') - self.assertRaises(exc.BadRequest, - self._build_middleware().process_request, req) - - def test_too_big_version(self): - req = self._build_request(self.MIME_TYPE + '10000.0', '/artifacts') - self.assertRaises(exc.InvalidGlobalAPIVersion, - self._build_middleware().process_request, req) - - def test_latest_version(self): - req = self._build_request(self.MIME_TYPE + 'latest', '/artifacts') - self._build_middleware().process_request(req) - self.assertEqual('1.0', req.api_version_request.get_string()) - - def test_version_unknown(self): - req = self._build_request('UNKNOWN', '/artifacts') - self._build_middleware().process_request(req) - self.assertEqual('1.0', req.api_version_request.get_string()) - - def test_response(self): - res = webob.Response() - req = self._build_request('1.0', '/artifacts') - mw = self._build_middleware() - mw.process_request(req) - mw.process_response(res, req) - self.assertIn('openstack-api-version', res.headers) - self.assertEqual('artifact 1.0', res.headers['openstack-api-version']) - self.assertIn('Vary', res.headers) - self.assertEqual('openstack-api-version', res.headers['Vary']) diff --git a/glare/tests/unit/test_fixtures.py b/glare/tests/unit/test_fixtures.py deleted file mode 100644 index eddc127..0000000 --- a/glare/tests/unit/test_fixtures.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import sqlalchemy -import testtools - -from glare.common import exception -from glare.tests.unit import glare_fixtures - - -class TestBannedDBSchemaOperations(testtools.TestCase): - def test_column(self): - column = sqlalchemy.Column() - with glare_fixtures.BannedDBSchemaOperations(['Column']): - self.assertRaises(exception.DBNotAllowed, - column.drop) - self.assertRaises(exception.DBNotAllowed, - column.alter) - - def test_table(self): - table = sqlalchemy.Table() - with glare_fixtures.BannedDBSchemaOperations(['Table']): - self.assertRaises(exception.DBNotAllowed, - table.drop) - self.assertRaises(exception.DBNotAllowed, - table.alter) diff --git a/glare/tests/unit/test_hacking.py b/glare/tests/unit/test_hacking.py deleted file mode 100644 index 6eca789..0000000 --- a/glare/tests/unit/test_hacking.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import inspect - -from glare.hacking import checks -from glare.tests.unit import base - - -class HackingTestCase(base.BaseTestCase): - def test_assert_true_instance(self): - self.assertEqual(1, len(list(checks.assert_true_instance( - "self.assertTrue(isinstance(e, " - "exception.BuildAbortException))")))) - - self.assertEqual( - 0, len(list(checks.assert_true_instance("self.assertTrue()")))) - - def test_assert_equal_type(self): - self.assertEqual(1, len(list(checks.assert_equal_type( - "self.assertEqual(type(als['QuicAssist']), list)")))) - - self.assertEqual( - 0, len(list(checks.assert_equal_type("self.assertTrue()")))) - - def test_assert_equal_none(self): - self.assertEqual(1, len(list(checks.assert_equal_none( - "self.assertEqual(A, None)")))) - - self.assertEqual(1, len(list(checks.assert_equal_none( - "self.assertEqual(None, A)")))) - - self.assertEqual( - 0, len(list(checks.assert_equal_none("self.assertIsNone()")))) - - def test_no_translate_logs(self): - for log in checks._all_log_levels: - bad = 'LOG.%s(_("Bad"))' % log - self.assertEqual(1, len(list(checks.no_translate_logs(bad)))) - # Catch abuses when used with a variable and not a literal - bad = 'LOG.%s(_(msg))' % log - self.assertEqual(1, len(list(checks.no_translate_logs(bad)))) - - def test_no_direct_use_of_unicode_function(self): - self.assertEqual(1, len(list(checks.no_direct_use_of_unicode_function( - "unicode('the party don't start til the unicode walks in')")))) - self.assertEqual(1, len(list(checks.no_direct_use_of_unicode_function( - """unicode('something ' - 'something else""")))) - self.assertEqual(0, len(list(checks.no_direct_use_of_unicode_function( - "six.text_type('party over')")))) - self.assertEqual(0, len(list(checks.no_direct_use_of_unicode_function( - "not_actually_unicode('something completely different')")))) - - def test_no_contextlib_nested(self): - self.assertEqual(1, len(list(checks.check_no_contextlib_nested( - "with contextlib.nested(")))) - - self.assertEqual(1, len(list(checks.check_no_contextlib_nested( - "with nested(")))) - - self.assertEqual(0, 
len(list(checks.check_no_contextlib_nested( - "with foo as bar")))) - - def test_dict_constructor_with_list_copy(self): - self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( - " dict([(i, connect_info[i])")))) - - self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( - " attrs = dict([(k, _from_json(v))")))) - - self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( - " type_names = dict((value, key) for key, value in")))) - - self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( - " dict((value, key) for key, value in")))) - - self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( - "foo(param=dict((k, v) for k, v in bar.items()))")))) - - self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( - " dict([[i,i] for i in range(3)])")))) - - self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( - " dd = dict([i,i] for i in range(3))")))) - - self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy( - " create_kwargs = dict(snapshot=snapshot,")))) - - self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy( - " self._render_dict(xml, data_el, data.__dict__)")))) - - def test_check_python3_xrange(self): - func = checks.check_python3_xrange - self.assertEqual(1, len(list(func('for i in xrange(10)')))) - self.assertEqual(1, len(list(func('for i in xrange (10)')))) - self.assertEqual(0, len(list(func('for i in range(10)')))) - self.assertEqual(0, len(list(func('for i in six.moves.range(10)')))) - self.assertEqual(0, len(list(func('testxrange(10)')))) - - def test_dict_iteritems(self): - self.assertEqual(1, len(list(checks.check_python3_no_iteritems( - "obj.iteritems()")))) - - self.assertEqual(0, len(list(checks.check_python3_no_iteritems( - "six.iteritems(obj)")))) - - self.assertEqual(0, len(list(checks.check_python3_no_iteritems( - "obj.items()")))) - - def test_dict_iterkeys(self): - self.assertEqual(1, len(list(checks.check_python3_no_iterkeys( - "obj.iterkeys()")))) - - self.assertEqual(0, len(list(checks.check_python3_no_iterkeys( - "six.iterkeys(obj)")))) - - self.assertEqual(0, len(list(checks.check_python3_no_iterkeys( - "obj.keys()")))) - - def test_dict_itervalues(self): - self.assertEqual(1, len(list(checks.check_python3_no_itervalues( - "obj.itervalues()")))) - - self.assertEqual(0, len(list(checks.check_python3_no_itervalues( - "six.itervalues(ob)")))) - - self.assertEqual(0, len(list(checks.check_python3_no_itervalues( - "obj.values()")))) - - def test_factory(self): - class Register(object): - def __init__(self): - self.funcs = [] - - def __call__(self, func): - self.funcs.append(func) - - register = Register() - checks.factory(register) - for name, func in inspect.getmembers(checks, inspect.isfunction): - if name != 'factory': - self.assertIn(func, register.funcs) diff --git a/glare/tests/unit/test_multistore.py b/glare/tests/unit/test_multistore.py deleted file mode 100644 index b87b715..0000000 --- a/glare/tests/unit/test_multistore.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2017 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from glare.objects.meta import registry -from glare.tests.unit import base - - -class TestMultistore(base.BaseTestCase): - - def test_multistore(self): - types = {'images': 'swift', - 'heat_templates': 'rbd', 'heat_environments': '', - 'tosca_templates': 'sheepdog', - 'murano_packages': 'vmware_store', - 'sample_artifact': 'database', - 'hooks_artifact': 'database'} - - self.config( - enabled_artifact_types=[":".join(_) for _ in types.items()]) - registry.ArtifactRegistry.register_all_artifacts() - - for t in registry.ArtifactRegistry.obj_classes().values(): - name = t[0].get_type_name() - if name == 'all': - continue - self.assertEqual(t[0].get_default_store(), types[name]) diff --git a/glare/tests/unit/test_utils.py b/glare/tests/unit/test_utils.py deleted file mode 100644 index 6c7bbbb..0000000 --- a/glare/tests/unit/test_utils.py +++ /dev/null @@ -1,281 +0,0 @@ -# Copyright 2016 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import tempfile - -import mock -from OpenSSL import crypto -import six - -from glare.common import exception as exc -from glare.common import utils -from glare.tests.unit import base - - -class TestUtils(base.BaseTestCase): - """Test class for glare.common.utils""" - - def test_validate_quotes(self): - self.assertIsNone(utils.validate_quotes('"classic"')) - self.assertIsNone(utils.validate_quotes('This is a good string')) - self.assertIsNone(utils.validate_quotes - ('"comma after quotation mark should work",')) - self.assertIsNone(utils.validate_quotes - (',"comma before quotation mark should work"')) - self.assertIsNone(utils.validate_quotes('"we have quotes \\" inside"')) - - def test_validate_quotes_negative(self): - self.assertRaises(exc.InvalidParameterValue, - utils.validate_quotes, 'not_comma"blabla"') - self.assertRaises(exc.InvalidParameterValue, utils.validate_quotes, - '"No comma after quotation mark"Not_comma') - self.assertRaises(exc.InvalidParameterValue, - utils.validate_quotes, '"The quote is not closed') - - def test_no_4bytes_params(self): - @utils.no_4byte_params - def test_func(*args, **kwargs): - return args, kwargs - - bad_char = u'\U0001f62a' - - # params without 4bytes unicode are okay - args, kwargs = test_func('val1', param='val2') - self.assertEqual(('val1',), args) - self.assertEqual({'param': 'val2'}, kwargs) - - # test various combinations with bad param - self.assertRaises(exc.BadRequest, test_func, - bad_char) - self.assertRaises(exc.BadRequest, test_func, - **{bad_char: 'val1'}) - self.assertRaises(exc.BadRequest, test_func, - **{'param': bad_char}) - - -class TestReaders(base.BaseTestCase): - """Test various readers in glare.common.utils""" - - def test_cooperative_reader_iterator(self): - """Ensure cooperative reader class accesses all bytes of file""" - BYTES = 1024 - bytes_read = 0 - with tempfile.TemporaryFile('w+') as tmp_fd: - tmp_fd.write('*' * BYTES) - tmp_fd.seek(0) - for chunk in utils.CooperativeReader(tmp_fd): - bytes_read += len(chunk) - - self.assertEqual(BYTES, bytes_read) - - def test_cooperative_reader_explicit_read(self): - BYTES = 1024 - bytes_read = 0 - with tempfile.TemporaryFile('w+') as tmp_fd: - tmp_fd.write('*' * BYTES) - tmp_fd.seek(0) - reader = utils.CooperativeReader(tmp_fd) - byte = reader.read(1) - while len(byte) != 0: - bytes_read += 1 - byte = reader.read(1) - - self.assertEqual(BYTES, bytes_read) - - def test_cooperative_reader_no_read_method(self): - BYTES = 1024 - stream = [b'*'] * BYTES - reader = utils.CooperativeReader(stream) - bytes_read = 0 - byte = reader.read() - while len(byte) != 0: - bytes_read += 1 - byte = reader.read() - - self.assertEqual(BYTES, bytes_read) - - # some data may be left in the buffer - reader = utils.CooperativeReader(stream) - reader.buffer = 'some data' - buffer_string = reader.read() - self.assertEqual('some data', buffer_string) - - def test_cooperative_reader_no_read_method_buffer_size(self): - # Decrease buffer size to 1000 bytes to test its overflow - with mock.patch('glare.common.utils.MAX_COOP_READER_BUFFER_SIZE', - 1000): - BYTES = 1024 - stream = [b'*'] * BYTES - reader = utils.CooperativeReader(stream) - # Reading 1001 bytes to the buffer leads to 413 error - self.assertRaises(exc.RequestEntityTooLarge, reader.read, 1001) - - def test_cooperative_reader_of_iterator(self): - """Ensure cooperative reader supports iterator backends too""" - data = b'abcdefgh' - data_list = [data[i:i + 1] * 3 for i in range(len(data))] - reader = utils.CooperativeReader(data_list) - 
chunks = [] - while True: - chunks.append(reader.read(3)) - if chunks[-1] == b'': - break - meat = b''.join(chunks) - self.assertEqual(b'aaabbbcccdddeeefffggghhh', meat) - - def test_cooperative_reader_of_iterator_stop_iteration_err(self): - """Ensure cooperative reader supports iterator backends too""" - reader = utils.CooperativeReader([l * 3 for l in '']) - chunks = [] - while True: - chunks.append(reader.read(3)) - if chunks[-1] == b'': - break - meat = b''.join(chunks) - self.assertEqual(b'', meat) - - def _create_generator(self, chunk_size, max_iterations): - chars = b'abc' - iteration = 0 - while True: - index = iteration % len(chars) - chunk = chars[index:index + 1] * chunk_size - yield chunk - iteration += 1 - if iteration >= max_iterations: - raise StopIteration() - - def _test_reader_chunked(self, chunk_size, read_size, max_iterations=5): - generator = self._create_generator(chunk_size, max_iterations) - reader = utils.CooperativeReader(generator) - result = bytearray() - while True: - data = reader.read(read_size) - if len(data) == 0: - break - self.assertLessEqual(len(data), read_size) - result += data - expected = (b'a' * chunk_size + - b'b' * chunk_size + - b'c' * chunk_size + - b'a' * chunk_size + - b'b' * chunk_size) - self.assertEqual(expected, bytes(result)) - - def test_cooperative_reader_preserves_size_chunk_less_then_read(self): - self._test_reader_chunked(43, 101) - - def test_cooperative_reader_preserves_size_chunk_equals_read(self): - self._test_reader_chunked(1024, 1024) - - def test_cooperative_reader_preserves_size_chunk_more_then_read(self): - chunk_size = 16 * 1024 * 1024 # 16 Mb, as in remote http source - read_size = 8 * 1024 # 8k, as in httplib - self._test_reader_chunked(chunk_size, read_size) - - def test_limiting_reader(self): - """Ensure limiting reader class accesses all bytes of file""" - BYTES = 1024 - bytes_read = 0 - data = six.BytesIO(b"*" * BYTES) - for chunk in utils.LimitingReader(data, BYTES): - bytes_read += len(chunk) - - self.assertEqual(BYTES, bytes_read) - - bytes_read = 0 - data = six.BytesIO(b"*" * BYTES) - reader = utils.LimitingReader(data, BYTES) - byte = reader.read(1) - while len(byte) != 0: - bytes_read += 1 - byte = reader.read(1) - - self.assertEqual(BYTES, bytes_read) - - def test_limiting_reader_fails(self): - """Ensure limiting reader class throws exceptions if limit exceeded""" - BYTES = 1024 - - def _consume_all_iter(): - bytes_read = 0 - data = six.BytesIO(b"*" * BYTES) - for chunk in utils.LimitingReader(data, BYTES - 1): - bytes_read += len(chunk) - - self.assertRaises(exc.RequestEntityTooLarge, _consume_all_iter) - - def _consume_all_read(): - bytes_read = 0 - data = six.BytesIO(b"*" * BYTES) - reader = utils.LimitingReader(data, BYTES - 1) - byte = reader.read(1) - while len(byte) != 0: - bytes_read += 1 - byte = reader.read(1) - - self.assertRaises(exc.RequestEntityTooLarge, _consume_all_read) - - def test_blob_iterator(self): - BYTES = 1024 - bytes_read = 0 - stream = [b'*'] * BYTES - for chunk in utils.BlobIterator(stream, 64): - bytes_read += len(chunk) - - self.assertEqual(BYTES, bytes_read) - - -class TestKeyCert(base.BaseTestCase): - - def test_validate_key_cert_key(self): - var_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), - '../', 'var')) - keyfile = os.path.join(var_dir, 'privatekey.key') - certfile = os.path.join(var_dir, 'certificate.crt') - utils.validate_key_cert(keyfile, certfile) - - def test_validate_key_cert_no_private_key(self): - with tempfile.NamedTemporaryFile('w+') as tmpf: - 
self.assertRaises(RuntimeError, - utils.validate_key_cert, - "/not/a/file", tmpf.name) - - def test_validate_key_cert_cert_cant_read(self): - with tempfile.NamedTemporaryFile('w+') as keyf: - with tempfile.NamedTemporaryFile('w+') as certf: - os.chmod(certf.name, 0) - self.assertRaises(RuntimeError, - utils.validate_key_cert, - keyf.name, certf.name) - - def test_validate_key_cert_key_cant_read(self): - with tempfile.NamedTemporaryFile('w+') as keyf: - with tempfile.NamedTemporaryFile('w+') as certf: - os.chmod(keyf.name, 0) - self.assertRaises(RuntimeError, - utils.validate_key_cert, - keyf.name, certf.name) - - def test_validate_key_cert_key_crypto_error(self): - var_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), - '../', 'var')) - keyfile = os.path.join(var_dir, 'privatekey.key') - certfile = os.path.join(var_dir, 'certificate.crt') - with mock.patch('OpenSSL.crypto.verify', side_effect=crypto.Error): - self.assertRaises(RuntimeError, - utils.validate_key_cert, - keyfile, certfile) diff --git a/glare/tests/unit/test_validation_hooks.py b/glare/tests/unit/test_validation_hooks.py deleted file mode 100644 index 84af163..0000000 --- a/glare/tests/unit/test_validation_hooks.py +++ /dev/null @@ -1,191 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os - -from glare.common import exception as exc -from glare.tests.unit import base - - -class TestArtifactHooks(base.BaseTestArtifactAPI): - - def setUp(self): - super(TestArtifactHooks, self).setUp() - values = {'name': 'ttt', 'version': '1.0'} - self.hooks_artifact = self.controller.create( - self.req, 'hooks_artifact', values) - - def test_upload_hook(self): - var_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), - '../', 'var')) - data_path = os.path.join(var_dir, 'hooks.zip') - with open(data_path, "rb") as data: - self.controller.upload_blob( - self.req, 'hooks_artifact', self.hooks_artifact['id'], 'zip', - data, 'application/octet-stream') - artifact = self.controller.show(self.req, 'hooks_artifact', - self.hooks_artifact['id']) - self.assertEqual(818, artifact['zip']['size']) - self.assertEqual('active', artifact['zip']['status']) - - self.assertEqual(11, artifact['content']['aaa.txt']['size']) - self.assertEqual(11, artifact['content']['folder1/bbb.txt']['size']) - self.assertEqual( - 11, artifact['content']['folder1/folder2/ccc.txt']['size']) - - def test_upload_hook_inmemory(self): - # enable in-memory processing - self.config(in_memory_processing=True, - group='hooks_artifact') - - self.test_upload_hook() - - def test_download_hook(self): - # upload data - var_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), - '../', 'var')) - data_path = os.path.join(var_dir, 'hooks.zip') - with open(data_path, "rb") as data: - self.controller.upload_blob( - self.req, 'hooks_artifact', self.hooks_artifact['id'], 'zip', - data, 'application/octet-stream') - - # download main 'zip' - data = self.controller.download_blob( - self.req, 'hooks_artifact', self.hooks_artifact['id'], - 'zip')['data'] - bytes_read = 0 - for chunk in data: - bytes_read += len(chunk) - self.assertEqual(818, bytes_read) - - # download a file from 'content' - data = self.controller.download_blob( - self.req, 'hooks_artifact', self.hooks_artifact['id'], - 'content/folder1/bbb.txt')['data'] - bytes_read = 0 - for chunk in data: - bytes_read += len(chunk) - self.assertEqual(11, bytes_read) - - # now forbid to download zip - changes = [{'op': 'replace', 'path': '/forbid_download_zip', - 'value': 'yes'}] - self.update_with_values(changes, art_type='hooks_artifact', - art_id=self.hooks_artifact['id']) - - artifact = self.controller.show(self.req, 'hooks_artifact', - self.hooks_artifact['id']) - self.assertEqual(True, artifact['forbid_download_zip']) - - # download from 'zip' fails now - self.assertRaises( - exc.BadRequest, self.controller.download_blob, - self.req, 'hooks_artifact', self.hooks_artifact['id'], 'zip') - - # download a 'content' file still works - data = self.controller.download_blob( - self.req, 'hooks_artifact', self.hooks_artifact['id'], - 'content/folder1/folder2/ccc.txt')['data'] - bytes_read = 0 - for chunk in data: - bytes_read += len(chunk) - self.assertEqual(11, bytes_read) - - def test_activation_hook(self): - # forbid to activate artifact - changes = [{'op': 'replace', 'path': '/forbid_activate', - 'value': 'yes'}] - self.update_with_values(changes, art_type='hooks_artifact', - art_id=self.hooks_artifact['id']) - - # activation fails now - changes = [{'op': 'replace', 'path': '/status', - 'value': 'active'}] - self.assertRaises( - exc.BadRequest, self.update_with_values, changes, - art_type='hooks_artifact', art_id=self.hooks_artifact['id']) - - # unblock artifact activation - changes = [{'op': 'replace', 'path': '/forbid_activate', - 'value': 'no'}] - 
self.update_with_values(changes, art_type='hooks_artifact', - art_id=self.hooks_artifact['id']) - - # now activation works - changes = [{'op': 'replace', 'path': '/status', - 'value': 'active'}] - art = self.update_with_values(changes, art_type='hooks_artifact', - art_id=self.hooks_artifact['id']) - self.assertEqual('active', art['status']) - - def test_publishing_hook(self): - self.req = self.get_fake_request(user=self.users['admin']) - - # activate artifact to begin - changes = [{'op': 'replace', 'path': '/status', - 'value': 'active'}] - art = self.update_with_values(changes, art_type='hooks_artifact', - art_id=self.hooks_artifact['id']) - self.assertEqual('active', art['status']) - - # forbid to publish artifact - changes = [{'op': 'replace', 'path': '/forbid_publish', - 'value': 'yes'}] - self.update_with_values(changes, art_type='hooks_artifact', - art_id=self.hooks_artifact['id']) - - # publication fails now - changes = [{'op': 'replace', 'path': '/visibility', - 'value': 'public'}] - self.assertRaises( - exc.BadRequest, self.update_with_values, changes, - art_type='hooks_artifact', art_id=self.hooks_artifact['id']) - - # unblock artifact publication - changes = [{'op': 'replace', 'path': '/forbid_publish', - 'value': 'no'}] - self.update_with_values(changes, art_type='hooks_artifact', - art_id=self.hooks_artifact['id']) - - # now publication works - changes = [{'op': 'replace', 'path': '/visibility', - 'value': 'public'}] - art = self.update_with_values(changes, art_type='hooks_artifact', - art_id=self.hooks_artifact['id']) - self.assertEqual('public', art['visibility']) - - def test_deletion_hook(self): - # forbid to activate artifact - changes = [{'op': 'replace', 'path': '/forbid_delete', - 'value': 'yes'}] - self.update_with_values(changes, art_type='hooks_artifact', - art_id=self.hooks_artifact['id']) - - # deletion fails now - self.assertRaises( - exc.BadRequest, self.controller.delete, self.req, - 'hooks_artifact', self.hooks_artifact['id']) - - # unblock artifact deletion - changes = [{'op': 'replace', 'path': '/forbid_delete', - 'value': 'no'}] - self.update_with_values(changes, art_type='hooks_artifact', - art_id=self.hooks_artifact['id']) - - # now deletion works - self.controller.delete(self.req, 'hooks_artifact', - self.hooks_artifact['id']) - self.assertRaises(exc.NotFound, self.controller.show, self.req, - 'hooks_artifact', self.hooks_artifact['id']) diff --git a/glare/tests/unit/test_validators.py b/glare/tests/unit/test_validators.py deleted file mode 100644 index 6aaf384..0000000 --- a/glare/tests/unit/test_validators.py +++ /dev/null @@ -1,368 +0,0 @@ -# Copyright 2017 - Nokia Networks -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from oslo_versionedobjects import fields - -from glare.objects.meta import fields as glare_fields -from glare.objects.meta import validators -from glare.tests.unit import base - - -class TestValidators(base.BaseTestArtifactAPI): - - """Class for testing field validators.""" - - def test_uuid(self): - # test if applied string is uuid4 - validator = validators.UUID() - - # valid string - no exception - validator('167f8083-6bef-4f37-bf04-250343a2d53c') - - # invalid string - ValueError - self.assertRaises(ValueError, validator, 'INVALID') - - # only strings can be applied as values - self.assertEqual((fields.StringField,), - validators.UUID.get_allowed_types()) - - self.assertEqual( - {'pattern': ('^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F])' - '{4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$')}, - validator.to_jsonschema()) - - def test_allowed_values(self): - # test that field accepts only predefined values - validator_s = validators.AllowedValues(['aaa', 'bbb']) - validator_i = validators.AllowedValues([1, 2, 3]) - validator_f = validators.AllowedValues([1.0, 2.0, 3.0]) - - # allowed value - no exception - validator_s('aaa') - validator_s('bbb') - validator_i(1) - validator_i(3) - validator_f(1.0) - validator_f(3.0) - - # not allowed value - value error - self.assertRaises(ValueError, validator_s, 'a') - self.assertRaises(ValueError, validator_i, 4) - self.assertRaises(ValueError, validator_f, 4.0) - - # only strings, integers and floats can be applied as values - self.assertEqual( - (fields.StringField, fields.IntegerField, fields.FloatField), - validators.AllowedValues.get_allowed_types()) - - self.assertEqual({'enum': ['aaa', 'bbb']}, validator_s.to_jsonschema()) - self.assertEqual({'enum': [1, 2, 3]}, validator_i.to_jsonschema()) - self.assertEqual({'enum': [1.0, 2.0, 3.0]}, - validator_f.to_jsonschema()) - - def test_max_str_len(self): - # test max allowed string length - validator = validators.MaxStrLen(10) - - # allowed length - no exception - validator('a' * 10) - validator('') - - # too long string - value error - self.assertRaises(ValueError, validator, 'a' * 11) - - # only strings can be applied as values - self.assertEqual((fields.StringField,), - validators.MaxStrLen.get_allowed_types()) - - self.assertEqual({'maxLength': 10}, validator.to_jsonschema()) - - def test_min_str_len(self): - # test min allowed string length - validator = validators.MinStrLen(10) - - # allowed length - no exception - validator('a' * 10) - - # too short string - value error - self.assertRaises(ValueError, validator, 'a' * 9) - self.assertRaises(ValueError, validator, '') - - # only strings can be applied as values - self.assertEqual((fields.StringField,), - validators.MinStrLen.get_allowed_types()) - - self.assertEqual({'minLength': 10}, validator.to_jsonschema()) - - def test_forbidden_chars(self): - # test that string has no forbidden chars - validator = validators.ForbiddenChars(['a', '?']) - - # no forbidden chars - no exception - validator('b' * 10) - - # string contains forbidden chars - value error - self.assertRaises(ValueError, validator, 'abc') - self.assertRaises(ValueError, validator, '?') - - # only strings can be applied as values - self.assertEqual((fields.StringField,), - validators.ForbiddenChars.get_allowed_types()) - - self.assertEqual({'pattern': '^[^a?]+$'}, validator.to_jsonschema()) - - def test_max_dict_size(self): - # test max dict size - validator = validators.MaxDictSize(3) - - # allowed size - no exception - validator({'a': 1, 'b': 2, 'c': 3}) - validator({}) - - # too big 
dictionary - value error - self.assertRaises(ValueError, validator, - {'a': 1, 'b': 2, 'c': 3, 'd': 4}) - - # only dicts can be applied as values - self.assertEqual((glare_fields.Dict,), - validators.MaxDictSize.get_allowed_types()) - - self.assertEqual({'maxProperties': 3}, validator.to_jsonschema()) - - def test_min_dict_size(self): - # test min dict size - validator = validators.MinDictSize(3) - - # allowed size - no exception - validator({'a': 1, 'b': 2, 'c': 3}) - - # too small dictionary - value error - self.assertRaises(ValueError, validator, - {'a': 1, 'b': 2}) - self.assertRaises(ValueError, validator, {}) - - # only dicts can be applied as values - self.assertEqual((glare_fields.Dict,), - validators.MinDictSize.get_allowed_types()) - - self.assertEqual({'minProperties': 3}, validator.to_jsonschema()) - - def test_max_list_size(self): - # test max list size - validator = validators.MaxListSize(3) - - # allowed size - no exception - validator(['a', 'b', 'c']) - validator([]) - - # too big list - value error - self.assertRaises(ValueError, validator, - ['a', 'b', 'c', 'd']) - - # only lists can be applied as values - self.assertEqual((glare_fields.List,), - validators.MaxListSize.get_allowed_types()) - - self.assertEqual({'maxItems': 3}, validator.to_jsonschema()) - - def test_min_list_size(self): - # test max list size - validator = validators.MinListSize(3) - - # allowed size - no exception - validator(['a', 'b', 'c']) - - # too small list - value error - self.assertRaises(ValueError, validator, ['a', 'b']) - self.assertRaises(ValueError, validator, []) - - # only lists can be applied as values - self.assertEqual((glare_fields.List,), - validators.MinListSize.get_allowed_types()) - - self.assertEqual({'minItems': 3}, validator.to_jsonschema()) - - def test_max_number_size(self): - # test max number size - validator = validators.MaxNumberSize(10) - - # allowed size - no exception - validator(10) - validator(0) - validator(10.0) - validator(0.0) - - # too big number - value error - self.assertRaises(ValueError, validator, 11) - self.assertRaises(ValueError, validator, 10.1) - - # only integers and floats can be applied as values - self.assertEqual((fields.IntegerField, fields.FloatField), - validators.MaxNumberSize.get_allowed_types()) - - self.assertEqual({'maximum': 10}, validator.to_jsonschema()) - - def test_min_number_size(self): - # test min number size - validator = validators.MinNumberSize(10) - - # allowed size - no exception - validator(10) - validator(10.0) - - # too small number - value error - self.assertRaises(ValueError, validator, 9) - self.assertRaises(ValueError, validator, 9.9) - self.assertRaises(ValueError, validator, 0) - self.assertRaises(ValueError, validator, 0) - - # only integers and floats can be applied as values - self.assertEqual((fields.IntegerField, fields.FloatField), - validators.MinNumberSize.get_allowed_types()) - - self.assertEqual({'minimum': 10}, validator.to_jsonschema()) - - def test_unique(self): - # test uniqueness of list elements - - # validator raises exception in case of duplicates in the list - validator = validators.Unique() - # non strict validator removes duplicates without raising of ValueError - validator_nonstrict = validators.Unique(convert_to_set=True) - - # all elements unique - no exception - validator(['a', 'b', 'c']) - validator([]) - - # duplicates in the list - value error - self.assertRaises(ValueError, validator, ['a', 'a', 'b']) - - # non-strict validator converts list to set of elements - l = ['a', 'a', 'b'] - 
validator_nonstrict(l) - self.assertEqual({'a', 'b'}, set(l)) - - # only lists can be applied as values - self.assertEqual((glare_fields.List,), - validators.Unique.get_allowed_types()) - - self.assertEqual({'uniqueItems': True}, validator.to_jsonschema()) - - def test_allowed_dict_keys(self): - # test that dictionary contains only allowed keys - validator = validators.AllowedDictKeys(['aaa', 'bbb', 'ccc']) - - # only allowed keys - no exception - validator({'aaa': 5, 'bbb': 6}) - validator({}) - - # if dictionary has other keys - value error - self.assertRaises(ValueError, validator, {'aaa': 5, 'a': 7, 'bbb': 6}) - - # only dicts can be applied as values - self.assertEqual((glare_fields.Dict,), - validators.AllowedDictKeys.get_allowed_types()) - - self.assertEqual({'properties': {'aaa': {}, 'bbb': {}, 'ccc': {}}}, - validator.to_jsonschema()) - - def test_required_dict_keys(self): - # test that dictionary has required keys - validator = validators.RequiredDictKeys(['aaa', 'bbb']) - - # if dict has required keys - no exception - validator({'aaa': 5, 'bbb': 6}) - validator({'aaa': 5, 'bbb': 6, 'ccc': 7}) - - # in other case - value error - self.assertRaises(ValueError, validator, {'aaa': 5, 'a': 7}) - self.assertRaises(ValueError, validator, {}) - - # only dicts can be applied as values - self.assertEqual((glare_fields.Dict,), - validators.RequiredDictKeys.get_allowed_types()) - - self.assertEqual({'required': ['aaa', 'bbb']}, - validator.to_jsonschema()) - - def test_max_dict_key_len(self): - # test max limit for dict key length - validator = validators.MaxDictKeyLen(5) - - # if key length less than the limit - no exception - validator({'aaaaa': 5, 'bbbbb': 4}) - - # in other case - value error - self.assertRaises(ValueError, validator, {'aaaaaa': 5, 'a': 7}) - - # only dicts can be applied as values - self.assertEqual((glare_fields.Dict,), - validators.MaxDictKeyLen.get_allowed_types()) - - def test_mix_dict_key_len(self): - # test min limit for dict key length - validator = validators.MinDictKeyLen(5) - - # if key length bigger than the limit - no exception - validator({'aaaaa': 5, 'bbbbb': 4}) - - # in other case - value error - self.assertRaises(ValueError, validator, {'aaaaa': 5, 'a': 7}) - - # only dicts can be applied as values - self.assertEqual((glare_fields.Dict,), - validators.MinDictKeyLen.get_allowed_types()) - - def test_allowed_list_values(self): - # test that list contains only allowed values - # AllowedValues validator will be applied to each element of the list - validator = validators.ListElementValidator( - [validators.AllowedValues(['aaa', 'bbb', 'ccc'])]) - - # only allowed values - no exception - validator(['aaa', 'bbb']) - validator([]) - - # if list has other values - value error - self.assertRaises(ValueError, validator, ['aaa', 'a', 'bbb']) - self.assertRaises(ValueError, validator, ['ccc', {'aaa': 'bbb'}]) - - # only lists can be applied as values - self.assertEqual((glare_fields.List,), - validators.ListElementValidator.get_allowed_types()) - - self.assertEqual({'itemValidators': [{'enum': ['aaa', 'bbb', 'ccc']}]}, - validator.to_jsonschema()) - - def test_allowed_dict_values(self): - # test that dict contains only allowed values - # AllowedValues validator will be applied to each element of the dict - validator = validators.DictElementValidator( - [validators.AllowedValues(['aaa', 'bbb', 'ccc'])]) - - # only allowed values - no exception - validator({'a': 'aaa', 'b': 'bbb'}) - validator({}) - - # if dict has other values - value error - 
self.assertRaises(ValueError, validator, - {'a': 'aaa', 'b': 'bbb', 'c': 'c'}) - - # only dict can be applied as values - self.assertEqual((glare_fields.Dict,), - validators.DictElementValidator.get_allowed_types()) - - self.assertEqual( - {'propertyValidators': [{'enum': ['aaa', 'bbb', 'ccc']}]}, - validator.to_jsonschema()) diff --git a/glare/tests/unit/test_versions.py b/glare/tests/unit/test_versions.py deleted file mode 100644 index 01a34c4..0000000 --- a/glare/tests/unit/test_versions.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2016 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_serialization import jsonutils -import webob - -from glare.api import versions -from glare.tests.unit import base - - -class VersionsTest(base.BaseTestCase): - - """Test the version information returned from the API service.""" - - def test_get_version_list(self): - req = webob.Request.blank('/', base_url='http://127.0.0.1:9494/') - req.accept = 'application/json' - res = versions.Controller().index(req, is_multi=True) - self.assertEqual(300, res.status_int) - self.assertEqual('application/json', res.content_type) - results = jsonutils.loads(res.body)['versions'] - expected = [ - { - 'id': 'v1.0', - 'status': 'EXPERIMENTAL', - 'links': [{'rel': 'self', - 'href': 'http://127.0.0.1:9494/'}], - 'min_version': '1.0', - 'version': '1.0' - } - ] - self.assertEqual(expected, results) - - def test_get_version_list_public_endpoint(self): - req = webob.Request.blank('/', base_url='http://127.0.0.1:9494/') - req.accept = 'application/json' - self.config(bind_host='127.0.0.1', bind_port=9494, - public_endpoint='https://example.com:9494') - res = versions.Controller().index(req, is_multi=True) - self.assertEqual(300, res.status_int) - self.assertEqual('application/json', res.content_type) - results = jsonutils.loads(res.body)['versions'] - expected = [ - { - 'id': 'v1.0', - 'status': 'EXPERIMENTAL', - 'links': [{'rel': 'self', - 'href': 'https://example.com:9494/'}], - 'min_version': '1.0', - 'version': '1.0' - } - ] - self.assertEqual(expected, results) diff --git a/glare/tests/unit/test_wsgi.py b/glare/tests/unit/test_wsgi.py deleted file mode 100644 index a6561d8..0000000 --- a/glare/tests/unit/test_wsgi.py +++ /dev/null @@ -1,558 +0,0 @@ -# Copyright 2010-2011 OpenStack Foundation -# Copyright 2014 IBM Corp. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import datetime -import os -import socket - -import eventlet.patcher -import fixtures -import mock -from oslo_concurrency import processutils -from oslo_serialization import jsonutils -import routes -import six -from six.moves import http_client as http -import webob - -from glare.api.v1 import router -from glare.common import exception -from glare.common import wsgi -from glare import i18n -from glare.tests.unit import base - - -class RequestTest(base.BaseTestCase): - - def test_content_range(self): - request = wsgi.Request.blank('/tests/123') - request.headers["Content-Range"] = 'bytes 10-99/*' - range_ = request.get_content_range() - self.assertEqual(10, range_.start) - self.assertEqual(100, range_.stop) # non-inclusive - self.assertIsNone(range_.length) - - def test_content_range_invalid(self): - request = wsgi.Request.blank('/tests/123') - request.headers["Content-Range"] = 'bytes=0-99' - self.assertRaises(webob.exc.HTTPBadRequest, - request.get_content_range) - - def test_language_accept_default(self): - request = wsgi.Request.blank('/tests/123') - request.headers["Accept-Language"] = "zz-ZZ,zz;q=0.8" - result = request.best_match_language() - self.assertIsNone(result) - - def test_language_accept_none(self): - request = wsgi.Request.blank('/tests/123') - result = request.best_match_language() - self.assertIsNone(result) - - def test_best_match_language_expected(self): - # If Accept-Language is a supported language, best_match_language() - # returns it. - with mock.patch('babel.localedata.locale_identifiers', - return_value=['en']): - req = wsgi.Request.blank('/', headers={'Accept-Language': 'en'}) - self.assertEqual('en_US', req.best_match_language()) - - def test_request_match_language_unexpected(self): - # If Accept-Language is a language we do not support, - # best_match_language() returns None. 
- with mock.patch('babel.localedata.locale_identifiers', - return_value=['en']): - req = wsgi.Request.blank( - '/', headers={'Accept-Language': 'Klingon'}) - self.assertIsNone(req.best_match_language()) - - @mock.patch.object(webob.acceptparse.AcceptLanguage, 'best_match') - def test_best_match_language_unknown(self, mock_best_match): - # Test that we are actually invoking language negotiation by webob - request = wsgi.Request.blank('/') - accepted = 'unknown-lang' - request.headers = {'Accept-Language': accepted} - - mock_best_match.return_value = None - - self.assertIsNone(request.best_match_language()) - - # If Accept-Language is missing or empty, match should be None - request.headers = {'Accept-Language': ''} - self.assertIsNone(request.best_match_language()) - request.headers.pop('Accept-Language') - self.assertIsNone(request.best_match_language()) - - def test_http_error_response_codes(self): - """Makes sure v1 unallowed methods return 405""" - unallowed_methods = [ - ('/schemas', ['PUT', 'DELETE', 'HEAD', 'PATCH', 'POST']), - ('/schemas/type_name', ['PUT', 'DELETE', 'HEAD', 'PATCH', 'POST']), - ('/artifacts/type_name', ['PUT', 'DELETE', 'HEAD', 'PATCH']), - ('/artifacts/type_name/artifact_id', ['PUT', 'HEAD', 'POST']), - ('/artifacts/type_name/artifact_id/blob_name', - ['DELETE', 'HEAD', 'PATCH', 'POST']), - ] - api = router.API(routes.Mapper()) - for uri, methods in unallowed_methods: - for method in methods: - req = webob.Request.blank(uri) - req.method = method - res = req.get_response(api) - self.assertEqual(http.METHOD_NOT_ALLOWED, res.status_int) - - # Makes sure nonexistent methods return 405 - req = webob.Request.blank('/schemas/image') - req.method = 'NonexistentMethod' - res = req.get_response(api) - self.assertEqual(http.METHOD_NOT_ALLOWED, res.status_int) - - -class ResourceTest(base.BaseTestCase): - - def test_get_action_args(self): - env = { - 'wsgiorg.routing_args': [ - None, - { - 'controller': None, - 'format': None, - 'action': 'update', - 'id': 12, - }, - ], - } - - expected = {'action': 'update', 'id': 12} - actual = wsgi.Resource(None, None, None).get_action_args(env) - - self.assertEqual(expected, actual) - - def test_get_action_args_invalid_index(self): - env = {'wsgiorg.routing_args': []} - expected = {} - actual = wsgi.Resource(None, None, None).get_action_args(env) - self.assertEqual(expected, actual) - - def test_get_action_args_del_controller_error(self): - actions = {'format': None, - 'action': 'update', - 'id': 12} - env = {'wsgiorg.routing_args': [None, actions]} - expected = {'action': 'update', 'id': 12} - actual = wsgi.Resource(None, None, None).get_action_args(env) - self.assertEqual(expected, actual) - - def test_get_action_args_del_format_error(self): - actions = {'action': 'update', 'id': 12} - env = {'wsgiorg.routing_args': [None, actions]} - expected = {'action': 'update', 'id': 12} - actual = wsgi.Resource(None, None, None).get_action_args(env) - self.assertEqual(expected, actual) - - def test_dispatch(self): - class Controller(object): - def index(self, shirt, pants=None): - return (shirt, pants) - - resource = wsgi.Resource(None, None, None) - actual = resource.dispatch(Controller(), 'index', 'on', pants='off') - expected = ('on', 'off') - self.assertEqual(expected, actual) - - def test_dispatch_default(self): - class Controller(object): - def default(self, shirt, pants=None): - return (shirt, pants) - - resource = wsgi.Resource(None, None, None) - actual = resource.dispatch(Controller(), 'index', 'on', pants='off') - expected = ('on', 
'off') - self.assertEqual(expected, actual) - - def test_dispatch_no_default(self): - class Controller(object): - def show(self, shirt, pants=None): - return (shirt, pants) - - resource = wsgi.Resource(None, None, None) - self.assertRaises(AttributeError, resource.dispatch, Controller(), - 'index', 'on', pants='off') - - def test_call(self): - class FakeController(object): - def index(self, shirt, pants=None): - return shirt, pants - - resource = wsgi.Resource(FakeController(), None, None) - - def dispatch(obj, *args, **kwargs): - if isinstance(obj, wsgi.JSONRequestDeserializer): - return [] - if isinstance(obj, wsgi.JSONResponseSerializer): - raise webob.exc.HTTPForbidden() - - with mock.patch('glare.common.wsgi.Resource.dispatch', - side_effect=dispatch): - request = wsgi.Request.blank('/') - response = resource.__call__(request) - - self.assertIsInstance(response, webob.exc.HTTPForbidden) - self.assertEqual(http.FORBIDDEN, response.status_code) - - def test_call_raises_exception(self): - class FakeController(object): - def index(self, shirt, pants=None): - return (shirt, pants) - - resource = wsgi.Resource(FakeController(), None, None) - - with mock.patch('glare.common.wsgi.Resource.dispatch', - side_effect=Exception("test exception")): - request = wsgi.Request.blank('/') - response = resource.__call__(request) - - self.assertIsInstance(response, webob.exc.HTTPInternalServerError) - self.assertEqual(http.INTERNAL_SERVER_ERROR, response.status_code) - - @mock.patch.object(wsgi, 'translate_exception') - def test_resource_call_error_handle_localized(self, - mock_translate_exception): - class Controller(object): - def delete(self, req, identity): - raise webob.exc.HTTPBadRequest(explanation='Not Found') - - actions = {'action': 'delete', 'identity': 12} - env = {'wsgiorg.routing_args': [None, actions]} - request = wsgi.Request.blank('/tests/123', environ=env) - message_es = 'No Encontrado' - - resource = wsgi.Resource(Controller(), - wsgi.JSONRequestDeserializer(), - None) - translated_exc = webob.exc.HTTPBadRequest(message_es) - mock_translate_exception.return_value = translated_exc - - e = self.assertRaises(webob.exc.HTTPBadRequest, - resource, request) - self.assertEqual(message_es, str(e)) - - @mock.patch.object(webob.acceptparse.AcceptLanguage, 'best_match') - @mock.patch.object(i18n, 'translate') - def test_translate_exception(self, mock_translate, mock_best_match): - - mock_translate.return_value = 'No Encontrado' - mock_best_match.return_value = 'de' - - req = wsgi.Request.blank('/tests/123') - req.headers["Accept-Language"] = "de" - - e = webob.exc.HTTPNotFound(explanation='Not Found') - e = wsgi.translate_exception(req, e) - self.assertEqual('No Encontrado', e.explanation) - - def test_response_headers_encoded(self): - # prepare environment - for_openstack_comrades = \ - u'\u0417\u0430 \u043e\u043f\u0435\u043d\u0441\u0442\u0435\u043a, ' \ - u'\u0442\u043e\u0432\u0430\u0440\u0438\u0449\u0438' - - class FakeController(object): - def index(self, shirt, pants=None): - return (shirt, pants) - - class FakeSerializer(object): - def index(self, response, result): - response.headers['unicode_test'] = for_openstack_comrades - - # make request - resource = wsgi.Resource(FakeController(), None, FakeSerializer()) - actions = {'action': 'index'} - env = {'wsgiorg.routing_args': [None, actions]} - request = wsgi.Request.blank('/tests/123', environ=env) - response = resource.__call__(request) - - # ensure it has been encoded correctly - value = (response.headers['unicode_test'].decode('utf-8') - 
if six.PY2 else response.headers['unicode_test']) - self.assertEqual(for_openstack_comrades, value) - - -class JSONResponseSerializerTest(base.BaseTestCase): - - def test_to_json(self): - fixture = {"key": "value"} - expected = b'{"key": "value"}' - actual = wsgi.JSONResponseSerializer().to_json(fixture) - self.assertEqual(expected, actual) - - def test_to_json_with_date_format_value(self): - fixture = {"date": datetime.datetime(1901, 3, 8, 2)} - expected = b'{"date": "1901-03-08T02:00:00.000000"}' - actual = wsgi.JSONResponseSerializer().to_json(fixture) - self.assertEqual(expected, actual) - - def test_to_json_with_more_deep_format(self): - fixture = {"is_public": True, "name": [{"name1": "test"}]} - expected = {"is_public": True, "name": [{"name1": "test"}]} - actual = wsgi.JSONResponseSerializer().to_json(fixture) - actual = jsonutils.loads(actual) - for k in expected: - self.assertEqual(expected[k], actual[k]) - - def test_to_json_with_set(self): - fixture = set(["foo"]) - expected = b'["foo"]' - actual = wsgi.JSONResponseSerializer().to_json(fixture) - self.assertEqual(expected, actual) - - def test_default(self): - fixture = {"key": "value"} - response = webob.Response() - wsgi.JSONResponseSerializer().default(response, fixture) - self.assertEqual(http.OK, response.status_int) - content_types = [h for h in response.headerlist - if h[0] == 'Content-Type'] - self.assertEqual(1, len(content_types)) - self.assertEqual('application/json', response.content_type) - self.assertEqual(b'{"key": "value"}', response.body) - - -class JSONRequestDeserializerTest(base.BaseTestCase): - - def test_has_body_no_content_length(self): - request = wsgi.Request.blank('/') - request.method = 'POST' - request.body = b'asdf' - request.headers.pop('Content-Length') - self.assertFalse(wsgi.JSONRequestDeserializer().has_body(request)) - - def test_has_body_zero_content_length(self): - request = wsgi.Request.blank('/') - request.method = 'POST' - request.body = b'asdf' - request.headers['Content-Length'] = 0 - self.assertFalse(wsgi.JSONRequestDeserializer().has_body(request)) - - def test_has_body_has_content_length(self): - request = wsgi.Request.blank('/') - request.method = 'POST' - request.body = b'asdf' - self.assertIn('Content-Length', request.headers) - self.assertTrue(wsgi.JSONRequestDeserializer().has_body(request)) - - def test_no_body_no_content_length(self): - request = wsgi.Request.blank('/') - self.assertFalse(wsgi.JSONRequestDeserializer().has_body(request)) - - def test_from_json(self): - fixture = '{"key": "value"}' - expected = {"key": "value"} - actual = wsgi.JSONRequestDeserializer().from_json(fixture) - self.assertEqual(expected, actual) - - def test_from_json_malformed(self): - fixture = 'kjasdklfjsklajf' - self.assertRaises(webob.exc.HTTPBadRequest, - wsgi.JSONRequestDeserializer().from_json, fixture) - - def test_default_no_body(self): - request = wsgi.Request.blank('/') - actual = wsgi.JSONRequestDeserializer().default(request) - expected = {} - self.assertEqual(expected, actual) - - def test_default_with_body(self): - request = wsgi.Request.blank('/') - request.method = 'POST' - request.body = b'{"key": "value"}' - actual = wsgi.JSONRequestDeserializer().default(request) - expected = {"body": {"key": "value"}} - self.assertEqual(expected, actual) - - def test_has_body_has_transfer_encoding(self): - self.assertTrue(self._check_transfer_encoding( - transfer_encoding='chunked')) - - def test_has_body_multiple_transfer_encoding(self): - self.assertTrue(self._check_transfer_encoding( - 
transfer_encoding='chunked, gzip')) - - def test_has_body_invalid_transfer_encoding(self): - self.assertFalse(self._check_transfer_encoding( - transfer_encoding='invalid', content_length=0)) - - def test_has_body_invalid_transfer_encoding_no_content_len_and_body(self): - self.assertFalse(self._check_transfer_encoding( - transfer_encoding='invalid', include_body=False)) - - def test_has_body_invalid_transfer_encoding_no_content_len_but_body(self): - self.assertTrue(self._check_transfer_encoding( - transfer_encoding='invalid', include_body=True)) - - def test_has_body_invalid_transfer_encoding_with_content_length(self): - self.assertTrue(self._check_transfer_encoding( - transfer_encoding='invalid', content_length=5)) - - def test_has_body_valid_transfer_encoding_with_content_length(self): - self.assertTrue(self._check_transfer_encoding( - transfer_encoding='chunked', content_length=1)) - - def test_has_body_valid_transfer_encoding_without_content_length(self): - self.assertTrue(self._check_transfer_encoding( - transfer_encoding='chunked')) - - def _check_transfer_encoding(self, transfer_encoding=None, - content_length=None, include_body=True): - request = wsgi.Request.blank('/') - request.method = 'POST' - if include_body: - request.body = b'fake_body' - request.headers['transfer-encoding'] = transfer_encoding - if content_length is not None: - request.headers['content-length'] = content_length - - return wsgi.JSONRequestDeserializer().has_body(request) - - def test_get_bind_addr_default_value(self): - expected = ('0.0.0.0', '123456') - actual = wsgi.get_bind_addr(default_port="123456") - self.assertEqual(expected, actual) - - -class ServerTest(base.BaseTestCase): - def test_create_pool(self): - """Ensure the wsgi thread pool is an eventlet.greenpool.GreenPool.""" - actual = wsgi.Server(threads=1).create_pool() - self.assertIsInstance(actual, eventlet.greenpool.GreenPool) - - @mock.patch.object(wsgi.Server, 'configure_socket') - def test_http_keepalive(self, mock_configure_socket): - self.config(http_keepalive=False) - self.config(workers=None) - - server = wsgi.Server(threads=1) - server.sock = 'fake_socket' - # mocking eventlet.wsgi server method to check it is called with - # configured 'http_keepalive' value. 
- with mock.patch.object(eventlet.wsgi, - 'server') as mock_server: - fake_application = "fake-application" - server.start(fake_application, 0) - server.wait() - mock_server.assert_called_once_with('fake_socket', - fake_application, - log=server._logger, - debug=False, - custom_pool=server.pool, - keepalive=False, - socket_timeout=900) - - def test_number_of_workers(self): - """Ensure the default number of workers matches num cpus.""" - def pid(): - i = 1 - while True: - i += 1 - yield i - - with mock.patch.object(os, 'fork') as mock_fork: - mock_fork.side_effect = pid - server = wsgi.Server() - server.configure = mock.Mock() - fake_application = "fake-application" - server.start(fake_application, None) - self.assertEqual(processutils.get_worker_count(), - len(server.children)) - - def test_set_eventlet_hub_exception(self): - with mock.patch('eventlet.hubs.use_hub', side_effect=Exception): - self.assertRaises(exception.WorkerCreationFailure, - wsgi.set_eventlet_hub) - - -class GetSocketTestCase(base.BaseTestCase): - - def setUp(self): - super(GetSocketTestCase, self).setUp() - self.useFixture(fixtures.MonkeyPatch( - "glare.common.wsgi.get_bind_addr", - lambda x: ('192.168.0.13', 1234))) - addr_info_list = [(2, 1, 6, '', ('192.168.0.13', 80)), - (2, 2, 17, '', ('192.168.0.13', 80)), - (2, 3, 0, '', ('192.168.0.13', 80))] - self.useFixture(fixtures.MonkeyPatch( - "glare.common.wsgi.socket.getaddrinfo", - lambda *x: addr_info_list)) - self.useFixture(fixtures.MonkeyPatch( - "glare.common.wsgi.time.time", - mock.Mock(side_effect=[0, 1, 5, 10, 20, 35]))) - self.useFixture(fixtures.MonkeyPatch( - "glare.common.wsgi.utils.validate_key_cert", - lambda *x: None)) - wsgi.CONF.cert_file = '/etc/ssl/cert' - wsgi.CONF.key_file = '/etc/ssl/key' - wsgi.CONF.ca_file = '/etc/ssl/ca_cert' - wsgi.CONF.tcp_keepidle = 600 - - def test_correct_configure_socket(self): - mock_socket = mock.Mock() - self.useFixture(fixtures.MonkeyPatch( - 'glare.common.wsgi.ssl.wrap_socket', - mock_socket)) - self.useFixture(fixtures.MonkeyPatch( - 'glare.common.wsgi.eventlet.listen', - lambda *x, **y: mock_socket)) - server = wsgi.Server() - server.default_port = 1234 - server.configure_socket() - self.assertIn(mock.call.setsockopt( - socket.SOL_SOCKET, - socket.SO_REUSEADDR, - 1), mock_socket.mock_calls) - self.assertIn(mock.call.setsockopt( - socket.SOL_SOCKET, - socket.SO_KEEPALIVE, - 1), mock_socket.mock_calls) - if hasattr(socket, 'TCP_KEEPIDLE'): - self.assertIn(mock.call().setsockopt( - socket.IPPROTO_TCP, - socket.TCP_KEEPIDLE, - wsgi.CONF.tcp_keepidle), mock_socket.mock_calls) - - def test_get_socket_without_all_ssl_reqs(self): - wsgi.CONF.key_file = None - self.assertRaises(RuntimeError, wsgi.get_socket, 1234) - - def test_get_socket_with_bind_problems(self): - self.useFixture(fixtures.MonkeyPatch( - 'glare.common.wsgi.eventlet.listen', - mock.Mock(side_effect=( - [wsgi.socket.error(socket.errno.EADDRINUSE)] * 3 + [None])))) - self.useFixture(fixtures.MonkeyPatch( - 'glare.common.wsgi.ssl.wrap_socket', - lambda *x, **y: None)) - - self.assertRaises(RuntimeError, wsgi.get_socket, 1234) - - def test_get_socket_with_unexpected_socket_errno(self): - self.useFixture(fixtures.MonkeyPatch( - 'glare.common.wsgi.eventlet.listen', - mock.Mock(side_effect=wsgi.socket.error(socket.errno.ENOMEM)))) - self.useFixture(fixtures.MonkeyPatch( - 'glare.common.wsgi.ssl.wrap_socket', - lambda *x, **y: None)) - self.assertRaises(wsgi.socket.error, wsgi.get_socket, 1234) diff --git a/glare/tests/utils.py b/glare/tests/utils.py deleted file 
mode 100644 index 102887b..0000000 --- a/glare/tests/utils.py +++ /dev/null @@ -1,376 +0,0 @@ -# Copyright 2010-2011 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Common utilities used in testing""" -import errno -import functools -import os -import shlex -import shutil -import socket -import subprocess - -import fixtures -from oslo_config import cfg -from oslo_config import fixture as cfg_fixture -from oslo_log import log -import testtools - -from glare.common import config - -CONF = cfg.CONF -try: - CONF.debug -except cfg.NoSuchOptError: - # NOTE(sigmavirus24): If we run the entire test suite, the logging options - # will be registered appropriately and we do not need to re-register them. - # However, when we run a test in isolation (or use --debug), those options - # will not be registered for us. In order for a test in a class that - # inherits from BaseTestCase to even run, we will need to register them - # ourselves. BaseTestCase.config will set the debug level if something - # calls self.config(debug=True) so we need these options registered - # appropriately. - # See bug 1433785 for more details. - log.register_options(CONF) - - -class BaseTestCase(testtools.TestCase): - - def setUp(self): - super(BaseTestCase, self).setUp() - - self._config_fixture = self.useFixture(cfg_fixture.Config()) - - # NOTE(bcwaldon): parse_args has to be called to register certain - # command-line options - specifically we need config_dir for - # the following policy tests - config.parse_args(args=[]) - self.addCleanup(CONF.reset) - self.test_dir = self.useFixture(fixtures.TempDir()).path - self.conf_dir = os.path.join(self.test_dir, 'etc') - safe_mkdirs(self.conf_dir) - self.set_policy() - - def set_policy(self): - conf_file = "policy.json" - self.policy_file = self._copy_data_file(conf_file, self.conf_dir) - self.config(policy_file=self.policy_file, group='oslo_policy') - - def _copy_data_file(self, file_name, dst_dir): - src_file_name = os.path.join('glare/tests/etc', file_name) - shutil.copy(src_file_name, dst_dir) - dst_file_name = os.path.join(dst_dir, file_name) - return dst_file_name - - def config(self, **kw): - """Override some configuration values. - - The keyword arguments are the names of configuration options to - override and their values. - If a group argument is supplied, the overrides are applied to - the specified configuration option group. - All overrides are automatically cleared at the end of the current - test by the fixtures cleanup process. 
- """ - self._config_fixture.config(**kw) - - -class requires(object): - """Decorator that initiates additional test setup/teardown.""" - def __init__(self, setup=None, teardown=None): - self.setup = setup - self.teardown = teardown - - def __call__(self, func): - def _runner(*args, **kw): - if self.setup: - self.setup(args[0]) - func(*args, **kw) - if self.teardown: - self.teardown(args[0]) - _runner.__name__ = func.__name__ - _runner.__doc__ = func.__doc__ - return _runner - - -class depends_on_exe(object): - """Decorator to skip test if an executable is unavailable""" - def __init__(self, exe): - self.exe = exe - - def __call__(self, func): - def _runner(*args, **kw): - cmd = 'which %s' % self.exe - exitcode, out, err = execute(cmd, raise_error=False) - if exitcode != 0: - args[0].disabled_message = 'test requires exe: %s' % self.exe - args[0].disabled = True - func(*args, **kw) - _runner.__name__ = func.__name__ - _runner.__doc__ = func.__doc__ - return _runner - - -def skip_if_disabled(func): - """Decorator that skips a test if test case is disabled.""" - @functools.wraps(func) - def wrapped(*a, **kwargs): - func.__test__ = False - test_obj = a[0] - message = getattr(test_obj, 'disabled_message', - 'Test disabled') - if getattr(test_obj, 'disabled', False): - test_obj.skipTest(message) - func(*a, **kwargs) - return wrapped - - -def fork_exec(cmd, - exec_env=None, - logfile=None, - pass_fds=None): - """Execute a command using fork/exec. - - This is needed for executing programs that need path - searching but cannot have a shell as their parent process, for - example: glare. When glare starts it sets itself as - the parent process for its own process group. Thus the pid that - a Popen process would have is not the right pid to use for killing - the process group. This patch gives the test env direct access - to the actual pid. - - :param cmd: Command to execute, as a string (split with shlex). - :param exec_env: A dictionary representing the environment with - which to run the command. - :param logfile: A path to a file which will hold the stdout/err of - the child process. - :param pass_fds: Sequence of file descriptors passed to the child. - """ - env = os.environ.copy() - if exec_env is not None: - for env_name, env_val in exec_env.items(): - if callable(env_val): - env[env_name] = env_val(env.get(env_name)) - else: - env[env_name] = env_val - - pid = os.fork() - if pid == 0: - if logfile: - fds = [1, 2] - with open(logfile, 'r+b') as fptr: - for desc in fds: # redirect stdout/stderr to the logfile - try: - os.dup2(fptr.fileno(), desc) - except OSError: - pass - if pass_fds and hasattr(os, 'set_inheritable'): - # os.set_inheritable() is only available and needed - # since Python 3.4. On Python 3.3 and older, file descriptors are - # inheritable by default. - for fd in pass_fds: - os.set_inheritable(fd, True) - - args = shlex.split(cmd) - os.execvpe(args[0], args, env) - else: - return pid - - -def wait_for_fork(pid, - raise_error=True, - expected_exitcode=0): - """Wait for a process to complete - - This function will wait for the given pid to complete. If the - exit code does not match the expected_exitcode, an error - is raised. 
- """ - - rc = 0 - try: - (pid, rc) = os.waitpid(pid, 0) - rc = os.WEXITSTATUS(rc) - if rc != expected_exitcode: - raise RuntimeError('The exit code %d is not %d' - % (rc, expected_exitcode)) - except Exception: - if raise_error: - raise - - return rc - - -def execute(cmd, - raise_error=True, - no_venv=False, - exec_env=None, - expect_exit=True, - expected_exitcode=0, - context=None): - """Executes a command in a subprocess. - - Returns a tuple of (exitcode, out, err), where out is the string - output from stdout and err is the string output from stderr when - executing the command. - - :param cmd: Command string to execute - :param raise_error: If returncode is not 0 (success), then - raise a RuntimeError? (Default: True) - :param no_venv: Disable the virtual environment - :param exec_env: Optional dictionary of additional environment - variables; values may be callables, which will - be passed the current value of the named - environment variable - :param expect_exit: Optional flag, true iff a timely exit is expected - :param expected_exitcode: expected exitcode from the launcher - :param context: additional context for error message - """ - - env = os.environ.copy() - if exec_env is not None: - for env_name, env_val in exec_env.items(): - if callable(env_val): - env[env_name] = env_val(env.get(env_name)) - else: - env[env_name] = env_val - - # If we're asked to omit the virtualenv, and if one is set up, - # restore the various environment variables - if no_venv and 'VIRTUAL_ENV' in env: - # Clip off the first element of PATH - env['PATH'] = env['PATH'].split(os.pathsep, 1)[-1] - del env['VIRTUAL_ENV'] - - # Make sure that we use the programs in the - # current source directory's bin/ directory. - path_ext = [os.path.join(os.getcwd(), 'bin')] - - # Also jack in the path cmd comes from, if it's absolute - args = shlex.split(cmd) - executable = args[0] - if os.path.isabs(executable): - path_ext.append(os.path.dirname(executable)) - - env['PATH'] = ':'.join(path_ext) + ':' + env['PATH'] - process = subprocess.Popen(args, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env=env) - if expect_exit: - result = process.communicate() - (out, err) = result - exitcode = process.returncode - else: - out = '' - err = '' - exitcode = 0 - - if exitcode != expected_exitcode and raise_error: - msg = ("Command %(cmd)s did not succeed. Returned an exit " - "code of %(exitcode)d." - "\n\nSTDOUT: %(out)s" - "\n\nSTDERR: %(err)s" % {'cmd': cmd, 'exitcode': exitcode, - 'out': out, 'err': err}) - if context: - msg += "\n\nCONTEXT: %s" % context - raise RuntimeError(msg) - return exitcode, out, err - - -def find_executable(cmdname): - """Searches the path for a given cmdname. - - Returns an absolute filename if an executable with the given - name exists in the path, or None if one does not. - - :param cmdname: The bare name of the executable to search for - """ - - # Keep an eye out for the possibility of an absolute pathname - if os.path.isabs(cmdname): - return cmdname - - # Get a list of the directories to search - path = ([os.path.join(os.getcwd(), 'bin')] + - os.environ['PATH'].split(os.pathsep)) - - # Search through each in turn - for elem in path: - full_path = os.path.join(elem, cmdname) - if os.access(full_path, os.X_OK): - return full_path - - # No dice... - return None - - -def get_unused_port(): - """Returns an unused port on localhost. 
- """ - port, s = get_unused_port_and_socket() - s.close() - return port - - -def get_unused_port_and_socket(): - """Returns an unused port on localhost and the open socket - from which it was created. - """ - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.bind(('localhost', 0)) - addr, port = s.getsockname() - return (port, s) - - -def xattr_writes_supported(path): - """Returns True if the we can write a file to the supplied - path and subsequently write a xattr to that file. - """ - try: - import xattr - except ImportError: - return False - - def set_xattr(path, key, value): - xattr.setxattr(path, "user.%s" % key, value) - - # We do a quick attempt to write a user xattr to a temporary file - # to check that the filesystem is even enabled to support xattrs - fake_filepath = os.path.join(path, 'testing-checkme') - result = True - with open(fake_filepath, 'wb') as fake_file: - fake_file.write(b"XXX") - fake_file.flush() - try: - set_xattr(fake_filepath, 'hits', b'1') - except IOError as e: - if e.errno == errno.EOPNOTSUPP: - result = False - else: - # Cleanup after ourselves... - if os.path.exists(fake_filepath): - os.unlink(fake_filepath) - - return result - - -def safe_mkdirs(path): - try: - os.makedirs(path) - except OSError as e: - if e.errno != errno.EEXIST: - raise diff --git a/glare/tests/var/certificate.crt b/glare/tests/var/certificate.crt deleted file mode 100644 index e5f415a..0000000 --- a/glare/tests/var/certificate.crt +++ /dev/null @@ -1,92 +0,0 @@ -# > openssl x509 -in glare/tests/var/certificate.crt -noout -text -# Certificate: -# Data: -# Version: 1 (0x0) -# Serial Number: 1 (0x1) -# Signature Algorithm: sha1WithRSAEncryption -# Issuer: C=AU, ST=Some-State, O=OpenStack, OU=Glare, CN=Glare CA -# Validity -# Not Before: Feb 2 20:22:13 2015 GMT -# Not After : Jan 31 20:22:13 2024 GMT -# Subject: C=AU, ST=Some-State, O=OpenStack, OU=Glare, CN=127.0.0.1 -# Subject Public Key Info: -# Public Key Algorithm: rsaEncryption -# RSA Public Key: (4096 bit) -# Modulus (4096 bit): -# 00:9f:44:13:51:de:e9:5a:f7:ac:33:2a:1a:4c:91: -# a1:73:bc:f3:a6:d3:e6:59:ae:e8:e2:34:68:3e:f4: -# 40:c1:a1:1a:65:9a:a3:67:e9:2c:b9:79:9c:00:b1: -# 7c:c1:e6:9e:de:47:bf:f1:cb:f2:73:d4:c3:62:fe: -# 82:90:6f:b4:75:ca:7e:56:8f:99:3d:06:51:3c:40: -# f4:ff:74:97:4f:0d:d2:e6:66:76:8d:97:bf:89:ce: -# fe:b2:d7:89:71:f2:a0:d9:f5:26:7c:1a:7a:bf:2b: -# 8f:72:80:e7:1f:4d:4a:40:a3:b9:9e:33:f6:55:e0: -# 40:2b:1e:49:e4:8c:71:9d:11:32:cf:21:41:e1:13: -# 28:c6:d6:f6:e0:b3:26:10:6d:5b:63:1d:c3:ee:d0: -# c4:66:63:38:89:6b:8f:2a:c2:bd:4f:e4:bc:03:8f: -# a2:f2:5c:1d:73:11:9c:7b:93:3d:d6:a3:d1:2d:cd: -# 64:23:24:bc:65:3c:71:20:28:60:a0:ea:fe:77:0e: -# 1d:95:36:76:ad:e7:2f:1c:27:62:55:e3:9d:11:c1: -# fb:43:3e:e5:21:ac:fd:0e:7e:3d:c9:44:d2:bd:6f: -# 89:7e:0f:cb:88:54:57:fd:8d:21:c8:34:e1:47:01: -# 28:0f:45:a1:7e:60:1a:9c:4c:0c:b8:c1:37:2d:46: -# ab:18:9e:ca:49:d3:77:b7:92:3a:d2:7f:ca:d5:02: -# f1:75:81:66:39:51:aa:bc:d7:f0:91:23:69:e8:71: -# ae:44:76:5e:87:54:eb:72:fc:ac:fd:60:22:e0:6a: -# e4:ad:37:b7:f6:e5:24:b4:95:2c:26:0e:75:a0:e9: -# ed:57:be:37:42:64:1f:02:49:0c:bd:5d:74:6d:e6: -# f2:da:5c:54:82:fa:fc:ff:3a:e4:1a:7a:a9:3c:3d: -# ee:b5:df:09:0c:69:c3:51:92:67:80:71:9b:10:8b: -# 20:ff:a2:5e:c5:f2:86:a0:06:65:1c:42:f9:91:24: -# 54:29:ed:7e:ec:db:4c:7b:54:ee:b1:25:1b:38:53: -# ae:01:b6:c5:93:1e:a3:4d:1b:e8:73:47:50:57:e8: -# ec:a0:80:53:b1:34:74:37:9a:c1:8c:14:64:2e:16: -# dd:a1:2e:d3:45:3e:2c:46:62:20:2a:93:7a:92:4c: -# b2:cc:64:47:ad:63:32:0b:68:0c:24:98:20:83:08: -# 35:74:a7:68:7a:ef:d6:84:07:d1:5e:d7:c0:6c:3f: -# 
a7:4a:78:62:a8:70:75:37:fb:ce:1f:09:1e:7c:11: -# 35:cc:b3:5a:a3:cc:3f:35:c9:ee:24:6f:63:f8:54: -# 6f:7c:5b:b4:76:3d:f2:81:6d:ad:64:66:10:d0:c4: -# 0b:2c:2f -# Exponent: 65537 (0x10001) -# Signature Algorithm: sha1WithRSAEncryption -# 5f:e8:a8:93:20:6c:0f:12:90:a6:e2:64:21:ed:63:0e:8c:e0: -# 0f:d5:04:13:4d:2a:e9:a5:91:b7:e4:51:94:bd:0a:70:4b:94: -# c7:1c:94:ed:d7:64:95:07:6b:a1:4a:bc:0b:53:b5:1a:7e:f1: -# 9c:12:59:24:5f:36:72:34:ca:33:ee:28:46:fd:21:e6:52:19: -# 0c:3d:94:6b:bd:cb:76:a1:45:7f:30:7b:71:f1:84:b6:3c:e0: -# ac:af:13:81:9c:0e:6e:3c:9b:89:19:95:de:8e:9c:ef:70:ac: -# 07:ae:74:42:47:35:50:88:36:ec:32:1a:55:24:08:f2:44:57: -# 67:fe:0a:bb:6b:a7:bd:bc:af:bf:2a:e4:dd:53:84:6b:de:1d: -# 2a:28:21:38:06:7a:5b:d8:83:15:65:31:6d:61:67:00:9e:1a: -# 61:85:15:a2:4c:9a:eb:6d:59:8e:34:ac:2c:d5:24:4e:00:ff: -# 30:4d:a3:d5:80:63:17:52:65:ac:7f:f4:0a:8e:56:a4:97:51: -# 39:81:ae:e8:cb:52:09:b3:47:b4:fd:1b:e2:04:f9:f2:76:e3: -# 63:ef:90:aa:54:98:96:05:05:a9:91:76:18:ed:5d:9e:6e:88: -# 50:9a:f7:2c:ce:5e:54:ba:15:ec:62:ff:5d:be:af:35:03:b1: -# 3f:32:3e:0e ------BEGIN CERTIFICATE----- -MIIEKjCCAxICAQEwDQYJKoZIhvcNAQEFBQAwWzELMAkGA1UEBhMCQVUxEzARBgNV -BAgMClNvbWUtU3RhdGUxEjAQBgNVBAoMCU9wZW5TdGFjazEPMA0GA1UECwwGR2xh -bmNlMRIwEAYDVQQDDAlHbGFuY2UgQ0EwHhcNMTUwMjAyMjAyMjEzWhcNMjQwMTMx -MjAyMjEzWjBbMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTESMBAG -A1UEChMJT3BlblN0YWNrMQ8wDQYDVQQLEwZHbGFuY2UxEjAQBgNVBAMTCTEyNy4w -LjAuMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAJ9EE1He6Vr3rDMq -GkyRoXO886bT5lmu6OI0aD70QMGhGmWao2fpLLl5nACxfMHmnt5Hv/HL8nPUw2L+ -gpBvtHXKflaPmT0GUTxA9P90l08N0uZmdo2Xv4nO/rLXiXHyoNn1Jnwaer8rj3KA -5x9NSkCjuZ4z9lXgQCseSeSMcZ0RMs8hQeETKMbW9uCzJhBtW2Mdw+7QxGZjOIlr -jyrCvU/kvAOPovJcHXMRnHuTPdaj0S3NZCMkvGU8cSAoYKDq/ncOHZU2dq3nLxwn -YlXjnRHB+0M+5SGs/Q5+PclE0r1viX4Py4hUV/2NIcg04UcBKA9FoX5gGpxMDLjB -Ny1GqxieyknTd7eSOtJ/ytUC8XWBZjlRqrzX8JEjaehxrkR2XodU63L8rP1gIuBq -5K03t/blJLSVLCYOdaDp7Ve+N0JkHwJJDL1ddG3m8tpcVIL6/P865Bp6qTw97rXf -CQxpw1GSZ4BxmxCLIP+iXsXyhqAGZRxC+ZEkVCntfuzbTHtU7rElGzhTrgG2xZMe -o00b6HNHUFfo7KCAU7E0dDeawYwUZC4W3aEu00U+LEZiICqTepJMssxkR61jMgto -DCSYIIMINXSnaHrv1oQH0V7XwGw/p0p4YqhwdTf7zh8JHnwRNcyzWqPMPzXJ7iRv -Y/hUb3xbtHY98oFtrWRmENDECywvAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAF/o -qJMgbA8SkKbiZCHtYw6M4A/VBBNNKumlkbfkUZS9CnBLlMcclO3XZJUHa6FKvAtT -tRp+8ZwSWSRfNnI0yjPuKEb9IeZSGQw9lGu9y3ahRX8we3HxhLY84KyvE4GcDm48 -m4kZld6OnO9wrAeudEJHNVCINuwyGlUkCPJEV2f+Crtrp728r78q5N1ThGveHSoo -ITgGelvYgxVlMW1hZwCeGmGFFaJMmuttWY40rCzVJE4A/zBNo9WAYxdSZax/9AqO -VqSXUTmBrujLUgmzR7T9G+IE+fJ242PvkKpUmJYFBamRdhjtXZ5uiFCa9yzOXlS6 -Fexi/12+rzUDsT8yPg4= ------END CERTIFICATE----- diff --git a/glare/tests/var/hooks.zip b/glare/tests/var/hooks.zip deleted file mode 100644 index 52b2c4eb0e5cc388a85a11dd6c5ce9ad1249e4e1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 818 zcmWIWW@h1H0D;Wtr(R$Nl;B{HVMxo*Nl7g-)DI2eWMF=_C?<08%9zN~3T_5QmKV$n z3}7MvXo3h(4+q$UkHMcC*?>H5Am&FhAt@`ZV<>YG=sdoni%yMK*m8FRD$Lp z0VLxf4l{x{><=M_flNxUb7+A%OdN|z$;ruZ2Z9U)VRQ%jwLu*S@vk$`Sf{6I{V-$M zfyO2#Cc=%G2Q&hN(T(wG0UN`}B*%;^=p~?$Ai(g}5yV791S=#W(98_*2APH{8X%@I z0?k>{=!9e%JW}u(iYqdZoj4K6P(;)KO~n$5$WFzK7-Xm3#$qZwng| - ServerName example.com - SetEnv GLARE_CONFIG_FILE=/etc/glare/glare.conf - DocumentRoot /path/to/public_html/ - WSGIScriptAlias / /usr/lib/python2.7/site-packages/glare/wsgi.py - ... 
- - -""" - -import os - -import glance_store -from oslo_config import cfg -from oslo_log import log as logging - -from glare.common import config - - -CONF = cfg.CONF -logging.register_options(CONF) -CONFIG_FILE = os.environ.get("GLARE_CONFIG_FILE", "etc/glare.conf") -config.parse_args(args=["--config-file", CONFIG_FILE]) - -glance_store.register_opts(CONF) -glance_store.create_stores(CONF) -glance_store.verify_default_store() - -application = config.load_paste_app('glare-api') diff --git a/glare_tempest_plugin/__init__.py b/glare_tempest_plugin/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare_tempest_plugin/clients.py b/glare_tempest_plugin/clients.py deleted file mode 100644 index 4949cdd..0000000 --- a/glare_tempest_plugin/clients.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -from tempest.common import credentials_factory as common_creds -from tempest import config -from tempest.lib import auth - - -from glare_tempest_plugin.services.artifacts import artifacts_client - -CONF = config.CONF - - -class Manager(object): - - def __init__(self, - credentials=common_creds.get_configured_admin_credentials( - 'identity_admin')): - self.auth_provider = get_auth_provider(credentials) - - self.artifacts_client = artifacts_client.ArtifactsClient( - self.auth_provider) - - -def get_auth_provider(credentials, scope='project'): - default_params = { - 'disable_ssl_certificate_validation': - CONF.identity.disable_ssl_certificate_validation, - 'ca_certs': CONF.identity.ca_certificates_file, - 'trace_requests': CONF.debug.trace_requests - } - - if isinstance(credentials, auth.KeystoneV3Credentials): - auth_provider_class, auth_url = \ - auth.KeystoneV3AuthProvider, CONF.identity.uri_v3 - else: - auth_provider_class, auth_url = \ - auth.KeystoneV2AuthProvider, CONF.identity.uri - - _auth_provider = auth_provider_class(credentials, auth_url, - scope=scope, - **default_params) - _auth_provider.set_auth() - return _auth_provider diff --git a/glare_tempest_plugin/config.py b/glare_tempest_plugin/config.py deleted file mode 100644 index 6732ba5..0000000 --- a/glare_tempest_plugin/config.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2016 Red Hat, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -from oslo_config import cfg - -service_available_group = cfg.OptGroup(name='service_available', - title='Available OpenStack Services') - -ServiceAvailableGroup = [ - cfg.BoolOpt("glare", - default=True, - help="Whether or not glare is expected to be available") -] - -artifacts_group = cfg.OptGroup(name="artifacts", - title='Glare Options') - -ArtifactGroup = [ - cfg.StrOpt("catalog_type", - default="artifact"), - cfg.StrOpt("endpoint_type", - default="publicURL", - choices=["publicURL", "adminURL", "internalURL"], - help="The endpoint type for artifacts service") -] diff --git a/glare_tempest_plugin/contrib/gate_hook.sh b/glare_tempest_plugin/contrib/gate_hook.sh deleted file mode 100644 index 8e40c3e..0000000 --- a/glare_tempest_plugin/contrib/gate_hook.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2017 - Nokia -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/glare_tempest_plugin/contrib/post_test_hook.sh b/glare_tempest_plugin/contrib/post_test_hook.sh deleted file mode 100644 index 8e40c3e..0000000 --- a/glare_tempest_plugin/contrib/post_test_hook.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2017 - Nokia -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/glare_tempest_plugin/contrib/pre_test_hook.sh b/glare_tempest_plugin/contrib/pre_test_hook.sh deleted file mode 100644 index 8e40c3e..0000000 --- a/glare_tempest_plugin/contrib/pre_test_hook.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2017 - Nokia -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/glare_tempest_plugin/plugin.py b/glare_tempest_plugin/plugin.py deleted file mode 100644 index c552bf1..0000000 --- a/glare_tempest_plugin/plugin.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2016 Red Hat, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import os - - -from oslo_config import cfg -from tempest import config -from tempest.test_discover import plugins - - -from glare_tempest_plugin import config as glare_config - - -class GlareTempestPlugin(plugins.TempestPlugin): - def load_tests(self): - base_path = os.path.split(os.path.dirname( - os.path.abspath(__file__)))[0] - test_dir = "glare_tempest_plugin/tests" - full_test_dir = os.path.join(base_path, test_dir) - return full_test_dir, base_path - - def register_opts(self, conf): - try: - config.register_opt_group( - conf, glare_config.service_available_group, - glare_config.ServiceAvailableGroup - ) - except cfg.DuplicateOptError: - pass - try: - config.register_opt_group(conf, glare_config.artifacts_group, - glare_config.ArtifactGroup) - except cfg.DuplicateOptError: - pass - - def get_opt_lists(self): - return [ - (glare_config.service_available_group.name, - glare_config.ServiceAvailableGroup), - (glare_config.artifacts_group.name, - glare_config.ArtifactGroup) - ] diff --git a/glare_tempest_plugin/services/__init__.py b/glare_tempest_plugin/services/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare_tempest_plugin/services/artifacts/__init__.py b/glare_tempest_plugin/services/artifacts/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare_tempest_plugin/services/artifacts/artifacts_client.py b/glare_tempest_plugin/services/artifacts/artifacts_client.py deleted file mode 100644 index aaee70d..0000000 --- a/glare_tempest_plugin/services/artifacts/artifacts_client.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright (c) 2016 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import json - -from tempest import config -from tempest.lib.common import rest_client - - -CONF = config.CONF - - -class ArtifactsClient(rest_client.RestClient): - - def __init__(self, auth_provider): - super(ArtifactsClient, self).__init__( - auth_provider, - CONF.artifacts.catalog_type, - CONF.identity.region, - endpoint_type=CONF.artifacts.endpoint_type) - - def create_artifact(self, type_name, name, version='0.0.0', **kwargs): - kwargs.update({'name': name, 'version': version}) - uri = '/artifacts/{type_name}'.format(type_name=type_name) - resp, body = self.post(uri, body=json.dumps(kwargs)) - self.expected_success(201, resp.status) - parsed = self._parse_resp(body) - return parsed - - def get_artifact(self, type_name, art_id): - uri = '/artifacts/{type_name}/{id}'.format( - type_name=type_name, - id=art_id) - resp, body = self.get(uri) - self.expected_success(200, resp.status) - parsed = self._parse_resp(body) - return parsed - - def update_artifact(self, type_name, art_id, remove_props=None, **kwargs): - headers = {'Content-Type': 'application/json-patch+json'} - uri = '/artifacts/{type_name}/{id}'.format(type_name=type_name, - id=art_id) - changes = [] - - if remove_props: - for prop_name in remove_props: - if prop_name not in kwargs: - if '/' in prop_name: - changes.append({'op': 'remove', - 'path': '/%s' % prop_name}) - else: - changes.append({'op': 'replace', - 'path': '/%s' % prop_name, - 'value': None}) - for prop_name in kwargs: - changes.append({'op': 'add', - 'path': '/%s' % prop_name, - 'value': kwargs[prop_name]}) - resp, body = self.patch(uri, json.dumps(changes), headers=headers) - self.expected_success(200, resp.status) - parsed = self._parse_resp(body) - return parsed - - def activate_artifact(self, type_name, art_id): - return self.update_artifact(type_name, art_id, status='active') - - def deactivate_artifact(self, type_name, art_id): - return self.update_artifact(type_name, art_id, status='deactivated') - - def reactivate_artifact(self, type_name, art_id): - return self.update_artifact(type_name, art_id, status='active') - - def publish_artifact(self, type_name, art_id): - return self.update_artifact(type_name, art_id, visibility='public') - - def upload_blob(self, type_name, art_id, blob_property, data): - headers = {'Content-Type': 'application/octet-stream'} - uri = '/artifacts/{type_name}/{id}/{blob_prop}'.format( - type_name=type_name, - id=art_id, - blob_prop=blob_property) - resp, body = self.put(uri, data, headers=headers) - self.expected_success(200, resp.status) - parsed = self._parse_resp(body) - return parsed - - def download_blob(self, type_name, art_id, blob_property): - uri = '/artifacts/{type_name}/{id}/{blob_prop}'.format( - type_name=type_name, - id=art_id, - blob_prop=blob_property) - resp, body = self.get(uri) - self.expected_success(200, resp.status) - parsed = self._parse_resp(body) - return parsed - - def delete_artifact(self, type_name, art_id): - uri = '/artifacts/{type_name}/{id}'.format( - type_name=type_name, - id=art_id) - self.delete(uri) - - def list_artifacts(self, type_name): - uri = '/artifacts/{}'.format(type_name) - resp, body = self.get(uri) - self.expected_success(200, resp.status) - parsed = self._parse_resp(body) - return parsed diff --git a/glare_tempest_plugin/tests/__init__.py b/glare_tempest_plugin/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/glare_tempest_plugin/tests/api/__init__.py b/glare_tempest_plugin/tests/api/__init__.py deleted file mode 100644 index e69de29..0000000 diff 
--git a/glare_tempest_plugin/tests/api/base.py b/glare_tempest_plugin/tests/api/base.py deleted file mode 100644 index f6123b3..0000000 --- a/glare_tempest_plugin/tests/api/base.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2016 Red Hat, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -from glare_tempest_plugin import clients -from tempest.common import credentials_factory as common_creds -from tempest.common import dynamic_creds -from tempest import config -from tempest.lib import base - - -CONF = config.CONF - - -class BaseArtifactTest(base.BaseTestCase): - - @classmethod - def setUpClass(cls): - super(BaseArtifactTest, cls).setUpClass() - cls.resource_setup() - pass - - @classmethod - def tearDownClass(cls): - pass - - @classmethod - def get_client_with_isolated_creds(cls, type_of_creds="admin"): - creds = cls.get_configured_isolated_creds( - type_of_creds=type_of_creds) - - os = clients.Manager(credentials=creds) - client = os.artifacts_client - return client - - @classmethod - def resource_setup(cls): - if not CONF.service_available.glare: - skip_msg = "Glare is disabled" - raise cls.skipException(skip_msg) - if not hasattr(cls, "os"): - creds = cls.get_configured_isolated_creds( - type_of_creds='primary') - cls.os = clients.Manager(credentials=creds) - cls.artifacts_client = cls.os.artifacts_client - - @classmethod - def get_configured_isolated_creds(cls, type_of_creds='admin'): - identity_version = CONF.identity.auth_version - if identity_version == 'v3': - cls.admin_role = CONF.identity.admin_role - else: - cls.admin_role = 'admin' - cls.dynamic_cred = dynamic_creds.DynamicCredentialProvider( - identity_version=CONF.identity.auth_version, - name=cls.__name__, admin_role=cls.admin_role, - admin_creds=common_creds.get_configured_admin_credentials( - 'identity_admin')) - if type_of_creds == 'primary': - creds = cls.dynamic_cred.get_primary_creds() - elif type_of_creds == 'admin': - creds = cls.dynamic_cred.get_admin_creds() - elif type_of_creds == 'alt': - creds = cls.dynamic_cred.get_alt_creds() - else: - creds = cls.dynamic_cred.get_credentials(type_of_creds) - cls.dynamic_cred.type_of_creds = type_of_creds - - return creds.credentials diff --git a/glare_tempest_plugin/tests/api/test_list_artifact.py b/glare_tempest_plugin/tests/api/test_list_artifact.py deleted file mode 100644 index 4b516df..0000000 --- a/glare_tempest_plugin/tests/api/test_list_artifact.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2016 Red Hat, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - - -import testtools - - -from glare_tempest_plugin.tests.api import base -from tempest import config - - -CONF = config.CONF - - -class TestListSanity(base.BaseArtifactTest): - - @testtools.testcase.attr('smoke') - def test_list_artifacts(self): - art = self.artifacts_client.create_artifact('images', 'tempest_test') - self.artifacts_client.list_artifacts('images') - self.artifacts_client.get_artifact('images', art['id']) - self.artifacts_client.update_artifact(type_name='images', - art_id=art['id'], - name='newnewname') - data = 'dataaaa' - self.artifacts_client.upload_blob('images', art['id'], 'image', data) - self.artifacts_client.download_blob('images', art['id'], 'image') - self.artifacts_client.delete_artifact('images', art['id']) diff --git a/pylintrc b/pylintrc deleted file mode 100644 index 6b073fd..0000000 --- a/pylintrc +++ /dev/null @@ -1,27 +0,0 @@ -[Messages Control] -# W0511: TODOs in code comments are fine. -# W0142: *args and **kwargs are fine. -# W0622: Redefining id is fine. -disable-msg=W0511,W0142,W0622 - -[Basic] -# Variable names can be 1 to 31 characters long, with lowercase and underscores -variable-rgx=[a-z_][a-z0-9_]{0,30}$ - -# Argument names can be 2 to 31 characters long, with lowercase and underscores -argument-rgx=[a-z_][a-z0-9_]{1,30}$ - -# Method names should be at least 3 characters long -# and be lowercased with underscores -method-rgx=[a-z_][a-z0-9_]{2,50}$ - -# Module names matching nova-* are ok (files in bin/) -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+)|(nova-[a-z0-9_-]+))$ - -# Don't require docstrings on tests. -no-docstring-rgx=((__.*__)|([tT]est.*)|setUp|tearDown)$ - -[Design] -max-public-methods=100 -min-public-methods=0 -max-args=6 diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index bc53535..0000000 --- a/requirements.txt +++ /dev/null @@ -1,57 +0,0 @@ -# The order of packages is significant, because pip processes them in the order -# of appearance. Changing the order has an impact on the overall integration -# process, which may cause wedges in the gate later. 
- -pbr!=2.1.0,>=2.0.0 # Apache-2.0 - -SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT -alembic>=0.8.10 # MIT -eventlet!=0.18.3,!=0.20.1,<0.21.0,>=0.18.2 # MIT -PasteDeploy>=1.5.0 # MIT -Routes>=2.3.1 # MIT -WebOb>=1.7.1 # MIT -httplib2>=0.7.5 # MIT -oslo.config!=4.3.0,!=4.4.0,>=4.0.0 # Apache-2.0 -oslo.concurrency>=3.8.0 # Apache-2.0 -oslo.context>=2.14.0 # Apache-2.0 -oslo.service>=1.10.0 # Apache-2.0 -oslo.utils>=3.20.0 # Apache-2.0 -futurist!=0.15.0,>=0.11.0 # Apache-2.0 -keystoneauth1>=3.0.1 # Apache-2.0 -keystonemiddleware>=4.12.0 # Apache-2.0 -python-memcached>=1.56 # PSF -WSME>=0.8 # MIT -PyJWT>=1.0.1 # MIT -cryptography>=1.6 # BSD/Apache-2.0 - -# For paste.util.template used in keystone.common.template -Paste # MIT - -jsonpatch>=1.1 # BSD -jsonschema!=2.5.0,<3.0.0,>=2.0.0 # MIT -pyOpenSSL>=0.14 # Apache-2.0 -# Required by openstack.common libraries -six>=1.9.0 # MIT - -oslo.db>=4.24.0 # Apache-2.0 -oslo.i18n!=3.15.2,>=2.1.0 # Apache-2.0 -oslo.log>=3.22.0 # Apache-2.0 -oslo.messaging!=5.25.0,>=5.24.2 # Apache-2.0 -oslo.middleware>=3.27.0 # Apache-2.0 -oslo.policy>=1.23.0 # Apache-2.0 -oslo.serialization!=2.19.1,>=1.10.0 # Apache-2.0 -oslo.versionedobjects>=1.17.0 # Apache-2.0 - -retrying!=1.3.0,>=1.2.3 # Apache-2.0 -osprofiler>=1.4.0 # Apache-2.0 - -# Glance Store -glance-store>=0.18.0 # Apache-2.0 - -# Artifact repository -microversion-parse>=0.1.2 # Apache-2.0 -semantic-version>=2.3.1 # BSD - -# timeutils -iso8601>=0.1.11 # MIT -monotonic>=0.6 # Apache-2.0 diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 058d675..0000000 --- a/setup.cfg +++ /dev/null @@ -1,69 +0,0 @@ -[metadata] -name = glare -summary = OpenStack Artifact Service -description-file = README.rst -author = OpenStack -author-email = openstack-dev@lists.openstack.org -home-page = http://docs.openstack.org/developer/glare/ -classifier = - Environment :: OpenStack - Intended Audience :: Information Technology - Intended Audience :: System Administrators - License :: OSI Approved :: Apache Software License - Operating System :: POSIX :: Linux - Programming Language :: Python - Programming Language :: Python :: 2 - Programming Language :: Python :: 2.7 - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.5 - -[files] -packages = - glare - glare_tempest_plugin - -data_files = - etc/glare = - etc/glare-paste.ini - etc/glare-swift.conf.sample - -[entry_points] -console_scripts = - glare-api = glare.cmd.api:main - glare-db-manage = glare.cmd.db_manage:main - glare-scrubber = glare.cmd.scrubber:main -oslo.config.opts = - glare = glare.opts:list_artifacts_opts -oslo.policy.enforcer = - glare = glare.common.policy:_get_enforcer -oslo.policy.policies = - glare = glare.common.policy:list_rules -tempest.test_plugins = - glare_tempest_tests = glare_tempest_plugin.plugin:GlareTempestPlugin - -[build_sphinx] -all_files = 1 -build-dir = doc/build -source-dir = doc/source - -[egg_info] -tag_build = -tag_date = 0 -tag_svn_revision = 0 - -[compile_catalog] -directory = glare/locale -domain = glare - -[update_catalog] -domain = glare -output_dir = glare/locale -input_file = glare/locale/glare.pot - -[extract_messages] -keywords = _ gettext ngettext l_ lazy_gettext -mapping_file = babel.cfg -output_file = glare/locale/glare.pot - -[pbr] -autodoc_tree_index_modules = True diff --git a/setup.py b/setup.py deleted file mode 100644 index 566d844..0000000 --- a/setup.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 566d844..0000000
--- a/setup.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
-import setuptools
-
-# In Python < 2.7.4, lazy loading of the `pbr` package will break
-# setuptools if some other modules have registered functions in `atexit`.
-# Solution from: http://bugs.python.org/issue15881#msg170215
-try:
-    import multiprocessing  # noqa
-except ImportError:
-    pass
-
-setuptools.setup(
-    setup_requires=['pbr>=2.0.0'],
-    pbr=True)
diff --git a/test-requirements.txt b/test-requirements.txt
deleted file mode 100644
index d3e6f65..0000000
--- a/test-requirements.txt
+++ /dev/null
@@ -1,38 +0,0 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
-# Hacking already pins down pep8, pyflakes and flake8
-hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
-
-# For translations processing
-Babel!=2.4.0,>=2.3.4 # BSD
-
-# Needed for testing
-bandit>=1.1.0 # Apache-2.0
-coverage!=4.4,>=4.0 # Apache-2.0
-fixtures>=3.0.0 # Apache-2.0/BSD
-mox3!=0.19.0,>=0.7.0 # Apache-2.0
-mock>=2.0 # BSD
-sphinx>=1.6.2 # BSD
-requests>=2.14.2 # Apache-2.0
-testrepository>=0.0.18 # Apache-2.0/BSD
-testresources>=0.2.4 # Apache-2.0/BSD
-testscenarios>=0.4 # Apache-2.0/BSD
-testtools>=1.4.0 # MIT
-psutil>=3.2.2 # BSD
-oslotest>=1.10.0 # Apache-2.0
-os-testr>=0.8.0 # Apache-2.0
-
-# Optional packages that should be installed when testing
-PyMySQL>=0.7.6 # MIT License
-psycopg2>=2.5 # LGPL/ZPL
-pysendfile>=2.0.0 # MIT
-qpid-python;python_version=='2.7' # Apache-2.0
-xattr>=0.4 # MIT
-python-swiftclient>=3.2.0 # Apache-2.0
-
-# Documentation
-os-api-ref>=1.0.0 # Apache-2.0
-oslosphinx>=4.7.0 # Apache-2.0
-reno!=2.3.1,>=1.8.0 # Apache-2.0
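This file was installed by tox together with requirements.txt under the gate's upper-constraints pin (see install_command in the tox.ini below). A hand-rolled equivalent, offered as a sketch only, would have been:

    # Hypothetical manual setup mirroring tox's constrained install.
    pip install -c https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt \
        -r requirements.txt -r test-requirements.txt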
diff --git a/tools/test-setup.sh b/tools/test-setup.sh
deleted file mode 100755
index 07a0785..0000000
--- a/tools/test-setup.sh
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/bin/bash -xe
-
-# This script will be run by OpenStack CI before unit tests are run;
-# it sets up the test system as needed.
-# Developers should set up their test systems in a similar way.
-
-# This setup needs to be run as a user that can run sudo.
-
-# The root password for the MySQL database; pass it in via
-# MYSQL_ROOT_PW.
-DB_ROOT_PW=${MYSQL_ROOT_PW:-insecure_slave}
-
-# This user and its password are used by the tests; if you change them,
-# your tests might fail.
-DB_USER=openstack_citest
-DB_PW=openstack_citest
-
-sudo -H mysqladmin -u root password $DB_ROOT_PW
-
-# It's best practice to remove anonymous users from the database. If
-# an anonymous user exists, it matches first for connections, and
-# other connections from that host will not work.
-sudo -H mysql -u root -p$DB_ROOT_PW -h localhost -e "
-    DELETE FROM mysql.user WHERE User='';
-    FLUSH PRIVILEGES;
-    GRANT ALL PRIVILEGES ON *.*
-        TO '$DB_USER'@'%' identified by '$DB_PW' WITH GRANT OPTION;"
-
-# Now create our database.
-mysql -u $DB_USER -p$DB_PW -h 127.0.0.1 -e "
-    SET default_storage_engine=MYISAM;
-    DROP DATABASE IF EXISTS openstack_citest;
-    CREATE DATABASE openstack_citest CHARACTER SET utf8;"
-
-# Same for PostgreSQL.
-# The root password for the PostgreSQL database; pass it in via
-# POSTGRES_ROOT_PW.
-DB_ROOT_PW=${POSTGRES_ROOT_PW:-insecure_slave}
-
-# Set up the user.
-root_roles=$(sudo -H -u postgres psql -t -c "
-    SELECT 'HERE' from pg_roles where rolname='$DB_USER'")
-if [[ ${root_roles} == *HERE ]]; then
-    sudo -H -u postgres psql -c "ALTER ROLE $DB_USER WITH SUPERUSER LOGIN PASSWORD '$DB_PW'"
-else
-    sudo -H -u postgres psql -c "CREATE ROLE $DB_USER WITH SUPERUSER LOGIN PASSWORD '$DB_PW'"
-fi
-
-# Store the password for the tests.
-cat << EOF > $HOME/.pgpass
-*:*:*:$DB_USER:$DB_PW
-EOF
-chmod 0600 $HOME/.pgpass
-
-# Now create our database.
-psql -h 127.0.0.1 -U $DB_USER -d template1 -c "DROP DATABASE IF EXISTS openstack_citest"
-createdb -h 127.0.0.1 -U $DB_USER -l C -T template0 -E utf8 openstack_citest
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 6b72fb1..0000000
--- a/tox.ini
+++ /dev/null
@@ -1,74 +0,0 @@
-[tox]
-minversion = 1.6
-envlist = py27,pep8,py35
-skipsdist = True
-
-[testenv]
-setenv = VIRTUAL_ENV={envdir}
-         PYTHONDONTWRITEBYTECODE = 1
-         LANGUAGE=en_US
-usedevelop = True
-install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} {opts} {packages}
-deps = -r{toxinidir}/test-requirements.txt
-commands =
-    /bin/rm -f .testrepository/times.dbm
-    ostestr --slowest {posargs}
-whitelist_externals = bash
-passenv = *_proxy *_PROXY
-
-[testenv:debug]
-commands = oslo_debug_helper {posargs}
-
-[testenv:debug-py27]
-basepython = python2.7
-commands = oslo_debug_helper {posargs}
-
-[testenv:debug-py35]
-basepython = python3.5
-commands = oslo_debug_helper {posargs}
-
-[testenv:pep8]
-commands =
-    flake8 {posargs}
-    # Run the security linter
-    bandit -c bandit.yaml -r glare -n5 -p gate
-
-[testenv:cover]
-basepython = python2.7
-setenv = VIRTUAL_ENV={envdir}
-commands =
-    coverage erase
-    python setup.py testr --coverage --testr-args='^(?!.*test.*coverage).*$' --omit="*/test*"
-
-[testenv:venv]
-commands = {posargs}
-
-[testenv:genconfig]
-commands =
-    oslo-config-generator --config-file etc/oslo-config-generator/glare.conf
-
-[testenv:genpolicy]
-sitepackages = False
-envdir = {toxworkdir}/venv
-commands =
-    oslopolicy-sample-generator --namespace=glare --output-file=etc/policy.yaml.sample
-
-[testenv:docs]
-basepython = python2.7
-commands = python setup.py build_sphinx
-
-[testenv:bandit]
-commands = bandit -c bandit.yaml -r glare -n5 -p gate
-
-[flake8]
-# TODO(dmllr): Analyze or fix the warnings blacklisted below
-# H404 multi line docstring should start with a summary
-# H405 multi line docstring summary not separated with an empty line
ignore = H404,H405
-exclude = .venv,.git,.tox,dist,doc,etc,*glare/locale*,*lib/python*,*egg,build
-# H904 Delay string interpolations at logging calls.
-enable-extensions = H106,H203,H904
-
-[hacking]
-local-check-factory = glare.hacking.checks.factory
-import_exceptions = glare.i18n
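For historical context, the environments in the tox.ini above were the entry points for both local development and the CI gate. Typical invocations, as a sketch assuming tox >= 1.6 per the minversion setting, would have been roughly:

    # Sketch of the checkout-era workflow driven by the removed tox.ini.
    tox -e pep8       # flake8 plus the bandit security lint
    tox -e py27,py35  # unit tests via ostestr on both interpreters
    tox -e genconfig  # regenerate the sample config with oslo-config-generator
    tox -e docs       # build the Sphinx documentation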