diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index abfa1fe..0000000 --- a/.coveragerc +++ /dev/null @@ -1,7 +0,0 @@ -[run] -branch = True -source = craton -omit = craton/openstack/* - -[report] -ignore_errors = True diff --git a/.gitignore b/.gitignore deleted file mode 100644 index e52bb44..0000000 --- a/.gitignore +++ /dev/null @@ -1,55 +0,0 @@ -*.py[cod] - -# C extensions -*.so - -# Packages -*.egg* -*.egg-info -dist -build -eggs -parts -bin -var -sdist -develop-eggs -.installed.cfg -lib -lib64 - -# Installer logs -pip-log.txt - -# Unit test / coverage reports -cover/ -.coverage* -!.coveragerc -.tox -nosetests.xml -.testrepository -.venv - -# Translations -*.mo - -# Mr Developer -.mr.developer.cfg -.project -.pydevproject - -# Complexity -output/*.html -output/*/index.html - -# Sphinx -doc/build - -# pbr generates these -AUTHORS -ChangeLog - -# Editors -*~ -.*.swp -.*sw? diff --git a/.gitreview b/.gitreview deleted file mode 100644 index 1dc162f..0000000 --- a/.gitreview +++ /dev/null @@ -1,4 +0,0 @@ -[gerrit] -host=review.openstack.org -port=29418 -project=openstack/craton.git diff --git a/.mailmap b/.mailmap deleted file mode 100644 index 516ae6f..0000000 --- a/.mailmap +++ /dev/null @@ -1,3 +0,0 @@ -# Format is: -# -# diff --git a/.testr.conf b/.testr.conf deleted file mode 100644 index 6d83b3c..0000000 --- a/.testr.conf +++ /dev/null @@ -1,7 +0,0 @@ -[DEFAULT] -test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \ - OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \ - OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \ - ${PYTHON:-python} -m subunit.run discover -t ./ . 
$LISTOPT $IDOPTION -test_id_option=--load-list $IDFILE -test_list_option=--list diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst deleted file mode 100755 index 064032a..0000000 --- a/CONTRIBUTING.rst +++ /dev/null @@ -1,23 +0,0 @@ -Developer's Guide ------------------ -If you would like to contribute to the development of OpenStack, you must -follow the steps in this page: - - http://docs.openstack.org/infra/manual/developers.html - -Development Workflow --------------------- -If you already have a good understanding of how the system works and your -OpenStack accounts are set up, you can skip to the development workflow -section of this documentation to learn how changes to OpenStack should be -submitted for review via the Gerrit tool: - - http://docs.openstack.org/infra/manual/developers.html#development-workflow - -Engagement ----------- -Pull requests submitted through GitHub will be ignored. - -Bugs should be filed on Launchpad, not GitHub: - - https://bugs.launchpad.net/craton diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 78b5ee9..0000000 --- a/Dockerfile +++ /dev/null @@ -1,85 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -############################################################################ -## Usage: -## docker build --pull -t craton-api:latest . 
-## docker run -t --name craton-api -p 127.0.0.1:7780:7780 -d craton-api:latest -## docker logs and copy the username, api_key, and project_id -## python tools/generate_fake_data.py --url http://127.0.0.1:7780/v1 --user bootstrap --project --key -## Use the credentials from above to try different commands using python-cratonclient. -############################################################################## - -# Get Ubuntu base image -FROM ubuntu:16.04 - -# File Author / Maintainer -MAINTAINER Sulochan Acharya - -# Install required software and tools -RUN apt-get update \ - && apt-get install -y \ - gcc \ - git \ - curl \ - build-essential \ - python3.5 \ - python3.5-dev - -# Get pip -ADD https://bootstrap.pypa.io/get-pip.py /root/get-pip.py - -# Install pip -RUN python3.5 /root/get-pip.py - -# Install MySQL 5.7 -ENV MYSQL_ROOTPW root -RUN echo "mysql-server mysql-server/root_password password $MYSQL_ROOTPW" | debconf-set-selections && \ - echo "mysql-server mysql-server/root_password_again password $MYSQL_ROOTPW" | debconf-set-selections -RUN apt-get install -y mysql-server-5.7 mysql-client-5.7 -RUN service mysql start && \ - mysqladmin -u root -p"$MYSQL_ROOTPW" password '' && \ - service mysql stop - -# Change mysql bind address -RUN sed -i -e"s/^bind-address\s*=\s*127.0.0.1/bind-address = 0.0.0.0/" /etc/mysql/mysql.conf.d/mysqld.cnf - -# Install MySQL-python -RUN apt-get install -y libmysqlclient-dev python-mysqldb - -# pip install virtualenv -RUN pip3 install virtualenv - -# Expose port -EXPOSE 7780 3306 - -Add ./requirements.txt /requirements.txt - -# Init virutalenv -RUN virtualenv -p /usr/bin/python3.5 /craton - -# Change Working Dir -WORKDIR /craton - -# Install requirements -RUN bin/pip install -r /requirements.txt - -# pip install mysql-python -RUN bin/pip install mysqlclient - -# Add Craton -ADD . /craton - -# Install Craton -RUN bin/pip install . 
- -CMD ["tools/docker_run.sh"] diff --git a/HACKING.rst b/HACKING.rst deleted file mode 100644 index 812a0cf..0000000 --- a/HACKING.rst +++ /dev/null @@ -1,4 +0,0 @@ -craton Style Commandments -=============================================== - -Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/ diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 68c771a..0000000 --- a/LICENSE +++ /dev/null @@ -1,176 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index e69de29..0000000 diff --git a/README.rst b/README.rst index 10fe6ab..d98af48 100644 --- a/README.rst +++ b/README.rst @@ -1,29 +1,10 @@ -Craton -====== +This project is no longer maintained. -Craton is a new project we plan to propose for OpenStack inclusion. -Craton supports deploying and operating OpenStack clouds by providing -scalable fleet management: +The contents of this repository are still available in the Git +source code management system. 
To see the contents of this +repository before it reached its end of life, please check out the +previous commit with "git checkout HEAD^1". -* Inventory of configurable physical devices/hosts (the fleet) -* Audit and remediation workflows against this inventory -* REST APIs, CLI, and Python client to manage - -Support for workflows, CLI, and the Python client is in progress. - -For more information, please refer to the following project resources: - -* **Free software:** under the `Apache license `_ -* **Documentation:** https://craton.readthedocs.io -* **Source:** https://github.com/openstack/craton -* **Blueprints:** https://blueprints.launchpad.net/craton -* **Bugs:** https://bugs.launchpad.net/craton - -For information on how to contribute to Craton, please see the -contents of the `CONTRIBUTING.rst file `_. - -For information on how to setup a Developer's Environment, please -see the contents of `INSTALL.RST file `_. - -For more information on Craton distribution license, please see -the contents of the `LICENSE file `_. +For any further questions, please email +openstack-dev@lists.openstack.org or join #openstack-dev on +Freenode. diff --git a/api-ref/source/v1/cells.inc b/api-ref/source/v1/cells.inc deleted file mode 100644 index 13f7382..0000000 --- a/api-ref/source/v1/cells.inc +++ /dev/null @@ -1,273 +0,0 @@ -.. -*- rst -*- - -====== -Cells -====== - -Definition of cell - -Create Cell -============ - -.. rest_method:: POST /v1/cells - -Create a new Cell - -Normal response codes: OK(201) - -Error response codes: invalid request(400), validation exception(405) - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - name : cell_name - - region_id: region_id_body - - project_id: project_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -**Example Create Cell** (TO-DO) - -..literalinclude:: ../../doc/api_samples/cells/cells-create-req.json - :language: javascript - -Response --------- - -.. rest_parameters:: parameters.yaml - - - cell: cell - - id: cell_id_body - - name: cell_name - - region_id: region_id_body - - project_id: project_id - - note: note - - variables: variables - -**Example Create Cell** (TO-DO) - -..literalinclude:: ../../doc/api_samples/cells/cells-create-resp.json - :language: javascript - -List Cells -========== - -.. rest_method:: GET /v1/cells - -Gets all Cells - -Normal response codes: OK(200) - -Error response codes: invalid request(400), cell not found(404), validation exception(405) - -Default response: unexpected error - -Request --------- - -.. rest_parameters:: parameters.yaml - - - cell: cell_name_query - - region: region_name_query - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -Response --------- - -.. rest_parameters:: parameters.yaml - - - cells: cells - - id: cell_id_body - - name: cell_name - - region_id: region_id_body - - project_id: project_id - - note: note - - variables: variables - -**Example List Cells** (TO-DO) - -..literalinclude:: ../../doc/api_samples/cells/cells-list-resp.json - :language: javascript - -**Example Unexpected Error ** - -..literalinclude:: ../../doc/api_samples/errors/errors-unexpected-resp.json - :language: javascript - -Update Cells -============ - -.. 
rest_method:: PUT /v1/cells/{cell_id} - -Update an existing cell - -Normal response codes: OK(200) - -Error response codes: invalid request(400), cell not found(404), validation exception(405) - -Request -------- - -.. rest_parameters:: parameters.yaml - - - id: cell_id_body - - name: cell_name - - region_id: region_id_body - - project_id: project_id - - note: note - - variables: variables - - cell_id: cell_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -**Example Update Cell** (TO-DO) - -..literalinclude:: ../../doc/api_samples/cells/cells-update-req.json - :language: javascript - -Response --------- - -.. rest_parameters:: parameters.yaml - - - cell: cell - - id: cell_id_body - - name: cell_name - - region_id: region_id_body - - project_id: project_id - - note: note - - variables: variables - -**Example Update Cell** (TO-DO) - -..literalinclude:: ../../doc/api_samples/cells/cells-update-resp.json - :language: javascript - -Update Cell Data -================== - -.. rest_method:: PUT /v1/cells/{cell_id}/variables - -Update user defined variables for the cell - -Normal response codes: OK(200) - -Error response codes: invalid request(400), cell not found(404), validation exception(405) - -Request -------- - -.. rest_parameters:: parameters.yaml - - - key: key - - value: value - - cell_id: cell_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -**Example Update Cell Data** (TO-DO) - -..literalinclude:: ../../doc/api_samples/cells/cells-upadate—data-req.json - :language: javascript - -Response --------- - -.. 
rest_parameters:: parameters.yaml - - - key: key - - value: value - -**Example Update Cell Data** (TO-DO) - -..literalinclude:: ../../doc/api_samples/cells/cells-update-data-resp.json - :language: javascript - -Delete Cell -=========== - -.. rest_method:: DELETE /v1/cells/{cell_id} - -Deletes an existing record of a Cell - -Normal response codes: OK(200) - -Error response codes: invalid request(400), cell not found(404) - -Request -------- - -.. rest_parameters:: parameters.yaml - - - cell_id: cell_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE - -Delete Cell Data -================ - -.. rest_method:: DELETE /v1/cells/{cell_id}/variables - -Delete existing key/value variable for the cell - -Normal response codes: OK(200) - -Error response codes: invalid request(400), cell not found(404) validation exception(405) - -Request -------- - -.. rest_parameters:: parameters.yaml - - - cell_id: cell_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE diff --git a/api-ref/source/v1/hosts.inc b/api-ref/source/v1/hosts.inc deleted file mode 100644 index 611b7b1..0000000 --- a/api-ref/source/v1/hosts.inc +++ /dev/null @@ -1,301 +0,0 @@ -.. -*- rst -*- - -===== -Hosts -===== - -Definition of host - -Create Host -============ - -.. rest_method:: POST /v1/hosts - -Create a new host - -Normal response codes: OK(201) - -Error response codes: invalid request(400), validation exception(405) - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - name: host_name - - region_id: region_id_body - - project_id: project_id - - ip_address: ip_address - - device_type: device_type - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -**Example Create Host** (TO-DO) - -..literalinclude:: ../../doc/api_samples/hosts/hosts-create-req.json - :language: javascript - -Response --------- - -.. rest_parameters:: parameters.yaml - - - host: host - - id: host_id_body - - name: host_name - - cell_id: cell_id_body - - parent_id: parent_id - - project_id: project_id - - region_id: region_id_body - - ip_address: ip_address - - device_type: device_type - - labels: labels - - note: note - - variables: variables - -**Example Create Host** (TO-DO) - -..literalinclude:: ../../doc/api_samples/hosts/hosts-create-resp.json - :language: javascript - -List Hosts -========== - -.. rest_method:: GET /v1/hosts - -Gets all Host - -Normal response codes: OK(200) - -Error response codes: invalid request(400), host not found(404), validation exception(405) - -Default response: unexpected error - -Request --------- - -.. rest_parameters:: parameters.yaml - - - limit: limit - - name: host_name_query - - id: host_id_query - - region: region_name_query - - cell: cell_name_query - - ip_address: ip_address_query - - service: service - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -Response --------- - -.. 
rest_parameters:: parameters.yaml - - - hosts: hosts - - id: host_id_body - - name: host_name - - cell_id: cell_id_body - - parent_id: parent_id - - project_id: project_id - - region_id: region_id_body - - ip_address: ip_address - - device_type: device_type - - labels: labels - - note: note - - variables: variables - -**Example List Host** (TO-DO) - -..literalinclude:: ../../doc/api_samples/hosts/hosts-list-resp.json - :language: javascript - -**Example Unexpected Error ** - -..literalinclude:: ../../doc/api_samples/errors/errors-unexpected-resp.json - :language: javascript - -Update Hosts -============ - -.. rest_method:: PUT /v1/hosts/{host_id} - -Update an existing host - -Normal response codes: OK(200) - -Error response codes: invalid request(400), host not found(404), validation exception(405) - -Request -------- - -.. rest_parameters:: parameters.yaml - - - id: host_id_body - - name: host_name - - cell_id: cell_id_body - - parent_id: parent_id - - project_id: project_id - - region_id: region_id_body - - ip_address: ip_address - - device_type: device_type - - labels: labels - - note: note - - variables: variables - - host_id: host_id - - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -**Example Update Host** (TO-DO) - -..literalinclude:: ../../doc/api_samples/hosts/hosts-update-req.json - :language: javascript - -Response --------- - -.. rest_parameters:: parameters.yaml - - - host: host - - id: host_id_body - - name: host_name - - cell_id: cell_id_body - - parent_id: parent_id - - project_id: project_id - - region_id: region_id_body - - ip_address: ip_address - - device_type: device_type - - labels: labels - - note: note - - variables: variables - -**Example Update Host** (TO-DO) - -..literalinclude:: ../../doc/api_samples/hosts/hosts-update-resp.json - :language: javascript - -Update Host Data -================== - -.. 
rest_method:: PUT /v1/hosts/{host_id}/variables - -Update user defined variables for the host - -Normal response codes: OK(200) - -Error response codes: invalid request(400), host not found(404), validation exception(405) - -Request -------- - -.. rest_parameters:: parameters.yaml - - - key: key - - value: value - - host_id: host_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -**Example Update Host Data** (TO-DO) - -..literalinclude:: ../../doc/api_samples/hosts/hosts-upadate—data-req.json - :language: javascript - -Response --------- - -.. rest_parameters:: parameters.yaml - - - key: key - - value: value - -**Example Update Host Data** (TO-DO) - -..literalinclude:: ../../doc/api_samples/hosts/hosts-update-data-resp.json - :language: javascript - -Delete Host -=========== - -.. rest_method:: DELETE /v1/hosts/{host_id} - -Deletes an existing record of a Host - -Normal response codes: OK(200) - -Error response codes: invalid request(400), host not found(404) - -Request -------- - -.. rest_parameters:: parameters.yaml - - - host_id: host_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE - -Delete Host Data -================ - -.. rest_method:: DELETE /v1/hosts/{host_id}/variables - -Delete existing key/value variables for the Host - -Normal response codes: OK(200) - -Error response codes: invalid request(400), host not found(404) validation exception(405) - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - host_id: host_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE diff --git a/api-ref/source/v1/index.rst b/api-ref/source/v1/index.rst deleted file mode 100644 index 7be943a..0000000 --- a/api-ref/source/v1/index.rst +++ /dev/null @@ -1,11 +0,0 @@ -:tocdepth: 2 - -=========== -Craton API -=========== - -.. rest_expand_all:: - -.. include:: cells.inc -.. include:: hosts.inc -.. include:: regions.inc diff --git a/api-ref/source/v1/parameters.yaml b/api-ref/source/v1/parameters.yaml deleted file mode 100644 index 48dd738..0000000 --- a/api-ref/source/v1/parameters.yaml +++ /dev/null @@ -1,201 +0,0 @@ -# variables in header -Content-Type: - description: | - Type of content sent using cURL - in: header - required: true - type: string -X-Auth-Project: - description: | - ID of the project this user is assigned to. 
- in: header - required: true - type: integer -X-Auth-Token: - description: | - User authentication token for the current session - in: header - required: true - type: string -X-Auth-User - description: | - User of the current session - in: header - required: true - type: string -# variables in path -cell_id: - description: | - The unique ID of the cell - in: path - required: true - type: integer -host_id: - description: | - The unique ID of the host -region_id: - description: | - The unique ID of the region - in: path - required: true - type: integer -# variables in body -cell: - description: | - A cell object - in: body - required: false - type: object -cell_id_body: - description: | - Unique ID of the cell - in: body - required: false - type: integer -cell_name: - description: | - Unique name of the cell - in: body - required: true - type: string -cells: - description: | - An array of cell objects - in: body - required: false - type: array -variables: - description: | - User defined information - in: body - required: false - type: object -device_type: - description: | - Type of host - in: body - required: false - type: string -host: - description: | - A host object - in: body - required: false - type: object -host_id_body: - description: | - Unique ID of the host - in: body - required: false - type: integer -host_name: - description: | - Unique name of the host -hosts: - description: | - An array of host objects - in: body - required: false - type: array -ip_address: - description: | - IP address - in: body - type: string -labels: - description: | - User defined labels - in: body - required: false - type: string -parent_id: - description: | - Parent ID of this host - in: body - required: false - type: integer -project_id: - description: | - ID of the project - in: body - required: true - type: integer -note: - description: | - Note used for governance - in: body - required: false - type: string -region: - description: | - A region object - in: body - required: 
false - type: object -region_id_body: - description: | - The unique ID of the region - in: body - required: false - type: integer -region_name: - description: | - Unique name of the region - in: body - required: true - type: string -regions: - description: | - An array of region objects - in: body - required: true - type: array -# variables in query -cell_name_query: - description: | - Name of the cell to get - in: query - required: false - type: string -ip_address_query: - description: | - IP address to get - in: query - required: false - type: string -host_id_query: - description: | - ID of the host to get - in: query - required: false - type: integer -host_name_query: - description: | - Name of host to get - in: query - required: false - type: string -limit: - description: | - Number of host to return ranging from 1 - 10000. Default = 1000 - in: query - required: false - type: integer -region_id_query: - description: | - ID of the region to get - in: query - required: false - type: string -region_name_query: - description: | - Name of the the region to get - in: query - required: false - type: string -service: - description: | - Openstack service to query host by - in: query - required: false - type: array diff --git a/api-ref/source/v1/regions.inc b/api-ref/source/v1/regions.inc deleted file mode 100644 index 248c98d..0000000 --- a/api-ref/source/v1/regions.inc +++ /dev/null @@ -1,260 +0,0 @@ -.. -*- rst -*- - -======= -Regions -======= - -Definition of region - -Create Region -============== - -.. rest_method:: POST /v1/region - -Creates a new Region - -Normal response codes: OK(201) - -Error response codes: invalid request(400), validation exception(405) - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - name: region_name - - project_id: project_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -**Example Create Region** - -..literalinclude:: ../../doc/api_samples/regions/regions-create-req.json - :language: javascript - -Response --------- - - - region: region - - id: region_id_body - - name: region_name - - project_id: project_id - - cells: cells - - variables: variables - -**Example Create Region** - -..literalinclude:: ../../doc/api_samples/regions/regions-create-resp.json - :language: javascript - -List Regions -============== - -.. rest_method:: GET /v1/regions - -Gets all Regions - -Normal response codes: OK(200) - -Error response codes: invalid request(400), validation exception(405) - -Default response: unexpected error - -Request --------- - - - name: region_name_query - - id: region_id_query - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -Response --------- - -.. rest_parameters:: parameters.yaml - - - region: region - - id: region_id_body - - name: region_name - - project_id: project_id - - cells: cells - - variables: variables - -**Example List Regions** - -..literalinclude:: ../../doc/api_samples/regions/regions-list-resp.json - :language: javascript - -**Example Unexpected Error ** - -..literalinclude:: ../../doc/api_samples/errors/errors-unexpected-resp.json - :language: javascript - -Update Region -============= - -.. rest_method:: PUT /v1/regions/{region_id} - -Update an existing region - -Normal response codes: OK(200) - -Error response codes: invalid request(400), region not found(404), validation exception(405) - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - id: region_id_body - - name: region_name - - project_id: project_id - - cells: cells - - variables: variables - - region_id: region_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -**Example Update Region** (TO-DO) - -..literalinclude:: ../../doc/api_samples/regions/regions-update-req.json - :language: javascript - -Response --------- - - - region: region - - id: region_id_body - - name: region_name - - project_id: project_id - - cells: cells - - variables: variables - -**Example Update Region** (TO-DO) - -..literalinclude:: ../../doc/api_samples/regions/regions-update-resp.json - :language: javascript - -Update Region Data -================== - -.. rest_method:: PUT /v1/regions/{region_id}/variables - -Update user defined variables for the region - -Normal response codes: OK(200) - -Error response codes: invalid request(400), region not found(404), validation exception(405) - -Request -------- - -.. rest_parameters:: parameters.yaml - - - key: key - - value: value - - region_id: region_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -**Example Update Region Data** (TO-DO) - -..literalinclude:: ../../doc/api_samples/regions/regions-upadate—data-req.json - :language: javascript - -Response --------- - - - key: key - - value: value - -**Example Update Region Data** (TO-DO) - -..literalinclude:: ../../doc/api_samples/regions/regions-update-data-resp.json - :language: javascript - -Delete Region -============== - -.. rest_method:: DELETE /v1/regions/{region_id} - -Deletes an existing record of a Region - -Normal response codes: OK(200) - -Error response codes: invalid request(400), region not found(404) - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - region_id: region_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE - -Delete Region Data -================== - -.. rest_method:: DELETE /v1/regions/{region_id}/variables - -Delete existing key/value variables for the region - -Normal response codes: OK(200) - -Error response codes: invalid request(400), region not found(404) validation exception(405) - -Request -------- - -.. rest_parameters:: parameters.yaml - - - region_id: region_id - -Required Header -^^^^^^^^^^^^^^^ - - - Content-Type: Content_Type - - X-Auth-Token: X-Auth-Token - - X-Auth-User: X-Auth-User - - X-Auth-Project: X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE diff --git a/babel.cfg b/babel.cfg deleted file mode 100644 index 15cd6cb..0000000 --- a/babel.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[python: **.py] - diff --git a/bindep.txt b/bindep.txt deleted file mode 100644 index ef4e1a9..0000000 --- a/bindep.txt +++ /dev/null @@ -1 +0,0 @@ -docker.io [platform:dpkg] diff --git a/craton/__init__.py b/craton/__init__.py deleted file mode 100644 index 26400da..0000000 --- a/craton/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import pbr.version - - -__version__ = pbr.version.VersionInfo( - 'craton').version_string() diff --git a/craton/_i18n.py b/craton/_i18n.py deleted file mode 100644 index 0944f11..0000000 --- a/craton/_i18n.py +++ /dev/null @@ -1,19 +0,0 @@ -"""oslo.i18n integration module. - -See http://docs.openstack.org/developer/oslo.i18n/usage.html - -""" - -import oslo_i18n - - -_translators = oslo_i18n.TranslatorFactory(domain='craton') - -# The primary translation function using the well-known name "_" -_ = _translators.primary - -# The contextual translation function using the name "_C" -_C = _translators.contextual_form - -# The plural translation function using the name "_P" -_P = _translators.plural_form diff --git a/craton/api/__init__.py b/craton/api/__init__.py deleted file mode 100644 index 02724e2..0000000 --- a/craton/api/__init__.py +++ /dev/null @@ -1,75 +0,0 @@ -import os -from paste import deploy -from flask import Flask - - -from oslo_config import cfg -from oslo_log import log as logging - -from craton.api import v1 -from craton.util import JSON_KWARGS - - -LOG = logging.getLogger(__name__) - -api_opts = [ - cfg.StrOpt('api_paste_config', - default="api-paste.ini", - help="Configuration file for API service."), - cfg.StrOpt('paste_pipeline', - default="local-auth", - choices=["local-auth", "keystone-auth"], - help="""\ -The name of the Paste pipeline to use for Craton. - -Pipelines are organized according to authentication scheme. 
The available -choices are: - -- ``local-auth`` (the default) Uses Craton's default authentication and - authorization scheme -- ``keystone-auth`` Uses Keystone for identity, authentication, and - authorization -"""), - cfg.StrOpt('host', - default="127.0.0.1", - help="API host IP"), - cfg.IntOpt('port', - default=5000, - help="API port to use.") -] - -CONF = cfg.CONF -opt_group = cfg.OptGroup(name='api', - title='Craton API service group options') -CONF.register_group(opt_group) -CONF.register_opts(api_opts, opt_group) - - -def create_app(global_config, **local_config): - return setup_app() - - -def setup_app(config=None): - app = Flask(__name__, static_folder=None) - app.config.update( - PROPAGATE_EXCEPTIONS=True, - RESTFUL_JSON=JSON_KWARGS, - ) - app.register_blueprint(v1.bp, url_prefix='/v1') - return app - - -def load_app(): - cfg_file = None - cfg_path = CONF.api.api_paste_config - paste_pipeline = CONF.api.paste_pipeline - if not os.path.isabs(cfg_path): - cfg_file = CONF.find_file(cfg_path) - elif os.path.exists(cfg_path): - cfg_file = cfg_path - - if not cfg_file: - raise cfg.ConfigFilesNotFoundError([cfg.CONF.api.api_paste_config]) - LOG.info("Loading craton-api with pipeline %(pipeline)s and WSGI config:" - "%(conf)s", {'conf': cfg_file, 'pipeline': paste_pipeline}) - return deploy.loadapp("config:%s" % cfg_file, name=paste_pipeline) diff --git a/craton/api/middleware.py b/craton/api/middleware.py deleted file mode 100644 index 1b8919d..0000000 --- a/craton/api/middleware.py +++ /dev/null @@ -1,136 +0,0 @@ -from oslo_middleware import base -from oslo_middleware import request_id -from oslo_context import context -from oslo_log import log -from oslo_utils import uuidutils - -from craton.db import api as dbapi -from craton import exceptions -from craton.util import handle_all_exceptions_decorator - - -LOG = log.getLogger(__name__) - - -class RequestContext(context.RequestContext): - - def __init__(self, **kwargs): - self.using_keystone = 
kwargs.pop('using_keystone', False) - self.token_info = kwargs.pop('token_info', None) - super(RequestContext, self).__init__(**kwargs) - - -class ContextMiddleware(base.Middleware): - - def make_context(self, request, *args, **kwargs): - req_id = request.environ.get(request_id.ENV_REQUEST_ID) - kwargs.setdefault('request_id', req_id) - - # TODO(sulo): Insert Craton specific context here if needed, - # for now we are using generic context object. - ctxt = RequestContext(*args, **kwargs) - request.environ['context'] = ctxt - return ctxt - - -class NoAuthContextMiddleware(ContextMiddleware): - - def __init__(self, application): - self.application = application - - @handle_all_exceptions_decorator - def process_request(self, request): - # Simply insert some dummy context info - self.make_context( - request, - auth_token='noauth-token', - user='noauth-user', - tenant=None, - is_admin=True, - is_admin_project=True, - ) - - @classmethod - def factory(cls, global_config, **local_config): - def _factory(application): - return cls(application) - - return _factory - - -class LocalAuthContextMiddleware(ContextMiddleware): - - def __init__(self, application): - self.application = application - - @handle_all_exceptions_decorator - def process_request(self, request): - headers = request.headers - project_id = headers.get('X-Auth-Project') - if not uuidutils.is_uuid_like(project_id): - raise exceptions.AuthenticationError( - message="Project ID ('{}') is not a valid UUID".format( - project_id - ) - ) - - ctx = self.make_context( - request, - auth_token=headers.get('X-Auth-Token', None), - user=headers.get('X-Auth-User', None), - tenant=project_id, - ) - - # NOTE(sulo): this means every api call hits the db - # at least once for auth. Better way to handle this? 
- try: - user_info = dbapi.get_user_info(ctx, - headers.get('X-Auth-User', None)) - if user_info.api_key != headers.get('X-Auth-Token', None): - raise exceptions.AuthenticationError - if user_info.is_root: - ctx.is_admin = True - ctx.is_admin_project = True - elif user_info.is_admin: - ctx.is_admin = True - ctx.is_admin_project = False - else: - ctx.is_admin = False - ctx.is_admin_project = False - except exceptions.NotFound: - raise exceptions.AuthenticationError - - @classmethod - def factory(cls, global_config, **local_config): - def _factory(application): - return cls(application) - return _factory - - -class KeystoneContextMiddleware(ContextMiddleware): - - @handle_all_exceptions_decorator - def process_request(self, request): - headers = request.headers - environ = request.environ - if headers.get('X-Identity-Status', '').lower() != 'confirmed': - raise exceptions.AuthenticationError - - token_info = environ['keystone.token_info']['token'] - roles = (role['name'] for role in token_info['roles']) - self.make_context( - request, - auth_token=headers.get('X-Auth-Token'), - is_admin=any(name == 'admin' for name in roles), - is_admin_project=environ['HTTP_X_IS_ADMIN_PROJECT'], - user=token_info['user']['name'], - tenant=token_info['project']['id'], - using_keystone=True, - token_info=token_info, - ) - - @classmethod - def factory(cls, global_config, **local_config): - def _factory(application): - return cls(application) - return _factory diff --git a/craton/api/v1/__init__.py b/craton/api/v1/__init__.py deleted file mode 100644 index c465243..0000000 --- a/craton/api/v1/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -from flask import Blueprint -import flask_restful as restful - -from craton.api.v1.routes import routes -from craton.util import handle_all_exceptions - - -class CratonApi(restful.Api): - - def error_router(self, _, e): - return self.handle_error(e) - - def handle_error(self, e): - return handle_all_exceptions(e) - - -bp = Blueprint('v1', __name__) -api = 
CratonApi(bp, catch_all_404s=False) - -for route in routes: - api.add_resource(route.pop('resource'), *route.pop('urls'), **route) diff --git a/craton/api/v1/base.py b/craton/api/v1/base.py deleted file mode 100644 index 6cb6685..0000000 --- a/craton/api/v1/base.py +++ /dev/null @@ -1,85 +0,0 @@ -import functools -import re -import urllib.parse as urllib - -import flask -import flask_restful as restful - -from craton.api.v1.validators import ensure_project_exists -from craton.api.v1.validators import request_validate -from craton.api.v1.validators import response_filter - - -SORT_KEY_SPLITTER = re.compile('[ ,]') - - -class Resource(restful.Resource): - method_decorators = [request_validate, ensure_project_exists, - response_filter] - - -def pagination_context(function): - @functools.wraps(function) - def wrapper(self, context, request_args): - pagination_parameters = { - 'limit': limit_from(request_args), - 'marker': request_args.pop('marker', None), - } - sort_keys = request_args.get('sort_keys') - if sort_keys is not None: - request_args['sort_keys'] = SORT_KEY_SPLITTER.split(sort_keys) - return function(self, context, request_args=request_args, - pagination_params=pagination_parameters) - return wrapper - - -def limit_from(filters, minimum=10, default=30, maximum=100): - """Retrieve the limit from query filters.""" - limit_str = filters.pop('limit', None) - - if limit_str is None: - return default - - limit = int(limit_str) - - # NOTE(sigmavirus24): If our limit falls within in our constraints, just - # return that - if minimum <= limit <= maximum: - return limit - - if limit < minimum: - return minimum - - # NOTE(sigmavirus24): If our limit isn't within the constraints, and it - # isn't too small, then it must be too big. In that case, let's just - # return the maximum. - return maximum - - -def links_from(link_params): - """Generate the list of hypermedia link relations from their parameters. 
- - This uses the request thread-local to determine the endpoint and generate - URLs from that. - - :param dict link_params: - A dictionary mapping the relation name to the query parameters. - :returns: - List of dictionaries to represent hypermedia link relations. - :rtype: - list - """ - links = [] - relations = ["first", "prev", "self", "next"] - base_url = flask.request.base_url - - for relation in relations: - query_params = link_params.get(relation) - if not query_params: - continue - link_rel = { - "rel": relation, - "href": base_url + "?" + urllib.urlencode(query_params), - } - links.append(link_rel) - return links diff --git a/craton/api/v1/resources/__init__.py b/craton/api/v1/resources/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/craton/api/v1/resources/inventory/__init__.py b/craton/api/v1/resources/inventory/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/craton/api/v1/resources/inventory/ansible_inventory.py b/craton/api/v1/resources/inventory/ansible_inventory.py deleted file mode 100644 index ae2e1ed..0000000 --- a/craton/api/v1/resources/inventory/ansible_inventory.py +++ /dev/null @@ -1,105 +0,0 @@ -from collections import OrderedDict -from operator import attrgetter -from oslo_serialization import jsonutils -from oslo_log import log - -from craton.api.v1 import base -from craton import db as dbapi -from craton import exceptions - - -LOG = log.getLogger(__name__) - - -class AnsibleInventory(base.Resource): - - def get_hierarchy(self, devices): - regions = set() - cells = set() - labels = set() - - for device in devices: - if device.region not in regions: - regions.add(device.region) - - if device.cell: - if device.cell not in cells: - cells.add(device.cell) - for label in device.labels: - if label not in labels: - labels.add(label) - - regions = sorted(regions, key=attrgetter('name')) - cells = sorted(cells, key=attrgetter('name')) - labels = sorted(labels, key=attrgetter('label')) - devices = 
sorted(devices, key=attrgetter('ip_address')) - return regions, cells, labels, devices - - def generate_ansible_inventory(self, hosts): - """Generate and return Ansible inventory in json format - for hosts given by provided filters. - """ - regions, cells, labels, hosts = self.get_hierarchy(hosts) - hosts_set = set(hosts) - # Set group 'all' and set '_meta' - inventory = OrderedDict( - [('all', {'hosts': []}), - ('_meta', {'hostvars': OrderedDict()})] - ) - - for host in hosts: - ip = str(host.ip_address) - inventory['all']['hosts'].append(ip) - inventory['_meta']['hostvars'][ip] = host.resolved - - def matching_hosts(obj): - return sorted( - [str(device.ip_address) for device in obj.devices - if device in hosts_set]) - - # Group hosts by label - # TODO(sulo): make sure we have a specified label to - # identify host group. Fix this after label refractoring. - for label in labels: - inventory[label.label] = { - 'hosts': matching_hosts(label), - 'vars': label.variables - } - - for cell in cells: - inventory['%s-%s' % (cell.region.name, cell.name)] = { - 'hosts': matching_hosts(cell), - 'vars': cell.variables - } - - for region in regions: - ch = ['%s-%s' % (region.name, cell.name) for cell in region.cells] - inventory['%s' % region.name] = { - 'children': ch, - 'vars': region.variables - } - return inventory - - def get(self, context, request_args): - region_id = request_args["region_id"] - cell_id = request_args["cell_id"] - - filters = {} - if region_id: - filters['region_id'] = region_id - - # TODO(sulo): allow other filters based on services - if cell_id: - filters['cell_id'] = cell_id - - try: - hosts_obj = dbapi.hosts_get_all(context, filters) - except exceptions.NotFound: - return self.error_response(404, 'Not Found') - except Exception as err: - LOG.error("Error during host get: %s" % err) - return self.error_response(500, 'Unknown Error') - - _inventory = self.generate_ansible_inventory(hosts_obj) - inventory = jsonutils.to_primitive(_inventory) - return 
inventory, 200, None diff --git a/craton/api/v1/resources/inventory/cells.py b/craton/api/v1/resources/inventory/cells.py deleted file mode 100644 index 6fc32b9..0000000 --- a/craton/api/v1/resources/inventory/cells.py +++ /dev/null @@ -1,65 +0,0 @@ -from oslo_serialization import jsonutils -from oslo_log import log - -from craton.api import v1 -from craton.api.v1 import base -from craton.api.v1.resources import utils -from craton import db as dbapi -from craton import util - - -LOG = log.getLogger(__name__) - - -class Cells(base.Resource): - - @base.pagination_context - def get(self, context, request_args, pagination_params): - """Get all cells, with optional filtering.""" - details = request_args.get("details") - - cells_obj, link_params = dbapi.cells_get_all( - context, request_args, pagination_params, - ) - if details: - cells_obj = [utils.get_resource_with_vars(request_args, cell) - for cell in cells_obj] - - links = base.links_from(link_params) - response_body = {'cells': cells_obj, 'links': links} - return jsonutils.to_primitive(response_body), 200, None - - def post(self, context, request_data): - """Create a new cell.""" - json = util.copy_project_id_into_json(context, request_data) - cell_obj = dbapi.cells_create(context, json) - cell = jsonutils.to_primitive(cell_obj) - if 'variables' in json: - cell["variables"] = jsonutils.to_primitive(cell_obj.variables) - else: - cell["variables"] = {} - - location = v1.api.url_for( - CellById, id=cell_obj.id, _external=True - ) - headers = {'Location': location} - - return cell, 201, headers - - -class CellById(base.Resource): - - def get(self, context, id, request_args): - cell_obj = dbapi.cells_get_by_id(context, id) - cell = utils.get_resource_with_vars(request_args, cell_obj) - return cell, 200, None - - def put(self, context, id, request_data): - """Update existing cell.""" - cell_obj = dbapi.cells_update(context, id, request_data) - return jsonutils.to_primitive(cell_obj), 200, None - - def delete(self, 
context, id): - """Delete existing cell.""" - dbapi.cells_delete(context, id) - return None, 204, None diff --git a/craton/api/v1/resources/inventory/clouds.py b/craton/api/v1/resources/inventory/clouds.py deleted file mode 100644 index 5e9e951..0000000 --- a/craton/api/v1/resources/inventory/clouds.py +++ /dev/null @@ -1,82 +0,0 @@ -from oslo_serialization import jsonutils -from oslo_log import log - -from craton.api import v1 -from craton.api.v1 import base -from craton.api.v1.resources import utils -from craton import db as dbapi -from craton import util - - -LOG = log.getLogger(__name__) - - -class Clouds(base.Resource): - - @base.pagination_context - def get(self, context, request_args, pagination_params): - """Get cloud(s) for the project. Get cloud details if - for a particular cloud. - """ - cloud_id = request_args.get("id") - cloud_name = request_args.get("name") - details = request_args.get("details") - - if not (cloud_id or cloud_name): - # Get all clouds for this project - clouds_obj, link_params = dbapi.clouds_get_all( - context, request_args, pagination_params, - ) - if details: - clouds_obj = [utils.get_resource_with_vars(request_args, c) - for c in clouds_obj] - else: - if cloud_name: - cloud_obj = dbapi.clouds_get_by_name(context, cloud_name) - cloud_obj.data = cloud_obj.variables - - if cloud_id: - cloud_obj = dbapi.clouds_get_by_id(context, cloud_id) - cloud_obj.data = cloud_obj.variables - - clouds_obj = [cloud_obj] - link_params = {} - links = base.links_from(link_params) - response_body = {'clouds': clouds_obj, 'links': links} - return jsonutils.to_primitive(response_body), 200, None - - def post(self, context, request_data): - """Create a new cloud.""" - json = util.copy_project_id_into_json(context, request_data) - cloud_obj = dbapi.clouds_create(context, json) - cloud = jsonutils.to_primitive(cloud_obj) - if 'variables' in json: - cloud["variables"] = jsonutils.to_primitive(cloud_obj.variables) - else: - cloud["variables"] = {} - - location 
= v1.api.url_for( - CloudsById, id=cloud_obj.id, _external=True - ) - headers = {'Location': location} - - return cloud, 201, headers - - -class CloudsById(base.Resource): - - def get(self, context, id): - cloud_obj = dbapi.clouds_get_by_id(context, id) - cloud = jsonutils.to_primitive(cloud_obj) - cloud['variables'] = jsonutils.to_primitive(cloud_obj.variables) - return cloud, 200, None - - def put(self, context, id, request_data): - """Update existing cloud.""" - cloud_obj = dbapi.clouds_update(context, id, request_data) - return jsonutils.to_primitive(cloud_obj), 200, None - - def delete(self, context, id): - """Delete existing cloud.""" - dbapi.clouds_delete(context, id) - return None, 204, None diff --git a/craton/api/v1/resources/inventory/devices.py b/craton/api/v1/resources/inventory/devices.py deleted file mode 100644 index 339ef9f..0000000 --- a/craton/api/v1/resources/inventory/devices.py +++ /dev/null @@ -1,49 +0,0 @@ -from oslo_serialization import jsonutils -from oslo_log import log - -from craton.api.v1 import base -from craton.api.v1.resources import utils -from craton import exceptions -from craton import db as dbapi -from craton.db.sqlalchemy import models - - -LOG = log.getLogger(__name__) - - -class Devices(base.Resource): - - @base.pagination_context - def get(self, context, request_args, pagination_params): - """Get all devices, with optional filtering.""" - details = request_args.get("details") - device_objs, link_params = dbapi.devices_get_all( - context, request_args, pagination_params, - ) - links = base.links_from(link_params) - - devices = {"hosts": [], "network-devices": []} - for device_obj in device_objs: - if details: - device = utils.get_resource_with_vars(request_args, - device_obj) - else: - device = jsonutils.to_primitive(device_obj) - - utils.add_up_link(context, device) - - if isinstance(device_obj, models.Host): - devices["hosts"].append(device) - elif isinstance(device_obj, models.NetworkDevice): - 
devices["network-devices"].append(device) - else: - LOG.error( - "The device is of unknown type: '%s'", device_obj - ) - raise exceptions.UnknownException - - response_body = jsonutils.to_primitive( - {'devices': devices, 'links': links} - ) - - return response_body, 200, None diff --git a/craton/api/v1/resources/inventory/hosts.py b/craton/api/v1/resources/inventory/hosts.py deleted file mode 100644 index eb8dd80..0000000 --- a/craton/api/v1/resources/inventory/hosts.py +++ /dev/null @@ -1,104 +0,0 @@ -from oslo_serialization import jsonutils -from oslo_log import log - -from craton.api import v1 -from craton.api.v1 import base -from craton.api.v1.resources import utils -from craton import db as dbapi -from craton import util - - -LOG = log.getLogger(__name__) - - -class Hosts(base.Resource): - - @base.pagination_context - def get(self, context, request_args, pagination_params): - """Get all hosts for region, with optional filtering.""" - details = request_args.get("details") - hosts_obj, link_params = dbapi.hosts_get_all( - context, request_args, pagination_params, - ) - if details: - hosts_obj = [utils.get_resource_with_vars(request_args, h) - for h in hosts_obj] - - links = base.links_from(link_params) - response_body = jsonutils.to_primitive( - {'hosts': hosts_obj, 'links': links} - ) - - for host in response_body["hosts"]: - utils.add_up_link(context, host) - - return response_body, 200, None - - def post(self, context, request_data): - """Create a new host.""" - json = util.copy_project_id_into_json(context, request_data) - host_obj = dbapi.hosts_create(context, json) - host = jsonutils.to_primitive(host_obj) - if 'variables' in json: - host["variables"] = jsonutils.to_primitive(host_obj.variables) - else: - host["variables"] = {} - - utils.add_up_link(context, host) - - location = v1.api.url_for( - HostById, id=host_obj.id, _external=True - ) - headers = {'Location': location} - - return host, 201, headers - - -class HostById(base.Resource): - - def 
get(self, context, id, request_args): - """Get host by given id""" - host_obj = dbapi.hosts_get_by_id(context, id) - host = utils.get_resource_with_vars(request_args, host_obj) - - utils.add_up_link(context, host) - - return host, 200, None - - def put(self, context, id, request_data): - """Update existing host data, or create if it does not exist.""" - host_obj = dbapi.hosts_update(context, id, request_data) - - host = jsonutils.to_primitive(host_obj) - - utils.add_up_link(context, host) - - return host, 200, None - - def delete(self, context, id): - """Delete existing host.""" - dbapi.hosts_delete(context, id) - return None, 204, None - - -class HostsLabels(base.Resource): - - def get(self, context, id): - """Get labels for given host device.""" - host_obj = dbapi.hosts_get_by_id(context, id) - response = {"labels": list(host_obj.labels)} - return response, 200, None - - def put(self, context, id, request_data): - """ - Update existing device label entirely, or add if it does - not exist. 
- """ - resp = dbapi.hosts_labels_update(context, id, request_data) - response = {"labels": list(resp.labels)} - return response, 200, None - - def delete(self, context, id, request_data): - """Delete device label entirely.""" - dbapi.hosts_labels_delete(context, id, request_data) - return None, 204, None diff --git a/craton/api/v1/resources/inventory/networks.py b/craton/api/v1/resources/inventory/networks.py deleted file mode 100644 index b502a23..0000000 --- a/craton/api/v1/resources/inventory/networks.py +++ /dev/null @@ -1,210 +0,0 @@ -from oslo_serialization import jsonutils -from oslo_log import log - -from craton.api import v1 -from craton.api.v1 import base -from craton.api.v1.resources import utils -from craton import db as dbapi -from craton import util - - -LOG = log.getLogger(__name__) - - -class Networks(base.Resource): - """Controller for Networks resources.""" - - @base.pagination_context - def get(self, context, request_args, pagination_params): - """Get all networks, with optional filtering.""" - details = request_args.get("details") - networks_obj, link_params = dbapi.networks_get_all( - context, request_args, pagination_params, - ) - if details: - networks_obj = [utils.get_resource_with_vars(request_args, n) - for n in networks_obj] - - links = base.links_from(link_params) - response_body = {'networks': networks_obj, 'links': links} - return jsonutils.to_primitive(response_body), 200, None - - def post(self, context, request_data): - """Create a new network.""" - json = util.copy_project_id_into_json(context, request_data) - network_obj = dbapi.networks_create(context, json) - network = jsonutils.to_primitive(network_obj) - if 'variables' in json: - network["variables"] = jsonutils.to_primitive( - network_obj.variables) - else: - network["variables"] = {} - - location = v1.api.url_for( - NetworkById, id=network_obj.id, _external=True - ) - headers = {'Location': location} - - return network, 201, headers - - -class NetworkById(base.Resource): - 
"""Controller for Networks by ID.""" - - def get(self, context, id): - """Get network by given id""" - obj = dbapi.networks_get_by_id(context, id) - device = jsonutils.to_primitive(obj) - device['variables'] = jsonutils.to_primitive(obj.variables) - return device, 200, None - - def put(self, context, id, request_data): - """Update existing network values.""" - net_obj = dbapi.networks_update(context, id, request_data) - return jsonutils.to_primitive(net_obj), 200, None - - def delete(self, context, id): - """Delete existing network.""" - dbapi.networks_delete(context, id) - return None, 204, None - - -class NetworkDevices(base.Resource): - """Controller for Network Device resources.""" - - @base.pagination_context - def get(self, context, request_args, pagination_params): - """Get all network devices.""" - details = request_args.get("details") - devices_obj, link_params = dbapi.network_devices_get_all( - context, request_args, pagination_params, - ) - if details: - devices_obj = [utils.get_resource_with_vars(request_args, d) - for d in devices_obj] - - links = base.links_from(link_params) - response_body = jsonutils.to_primitive( - {'network_devices': devices_obj, 'links': links} - ) - - for device in response_body["network_devices"]: - utils.add_up_link(context, device) - - return response_body, 200, None - - def post(self, context, request_data): - """Create a new network device.""" - json = util.copy_project_id_into_json(context, request_data) - obj = dbapi.network_devices_create(context, json) - device = jsonutils.to_primitive(obj) - if 'variables' in json: - device["variables"] = jsonutils.to_primitive(obj.variables) - else: - device["variables"] = {} - - utils.add_up_link(context, device) - - location = v1.api.url_for( - NetworkDeviceById, id=obj.id, _external=True - ) - headers = {'Location': location} - - return device, 201, headers - - -class NetworkDeviceById(base.Resource): - """Controller for Network Devices by ID.""" - - def get(self, context, id, 
request_args): - """Get network device by given id""" - obj = dbapi.network_devices_get_by_id(context, id) - obj = utils.format_variables(request_args, obj) - device = jsonutils.to_primitive(obj) - device['variables'] = jsonutils.to_primitive(obj.vars) - - utils.add_up_link(context, device) - - return device, 200, None - - def put(self, context, id, request_data): - """Update existing device values.""" - net_obj = dbapi.network_devices_update(context, id, request_data) - - device = jsonutils.to_primitive(net_obj) - utils.add_up_link(context, device) - - return device, 200, None - - def delete(self, context, id): - """Delete existing network device.""" - dbapi.network_devices_delete(context, id) - return None, 204, None - - -class NetworkDeviceLabels(base.Resource): - """Controller for Netowrk Device Labels.""" - - def get(self, context, id): - """Get labels for given network device.""" - obj = dbapi.network_devices_get_by_id(context, id) - response = {"labels": list(obj.labels)} - return response, 200, None - - def put(self, context, id, request_data): - """Update existing device label. 
Adds if it does not exist.""" - resp = dbapi.network_devices_labels_update(context, id, request_data) - response = {"labels": list(resp.labels)} - return response, 200, None - - def delete(self, context, id, request_data): - """Delete device label(s).""" - dbapi.network_devices_labels_delete(context, id, request_data) - return None, 204, None - - -class NetworkInterfaces(base.Resource): - """Controller for Netowrk Interfaces.""" - - @base.pagination_context - def get(self, context, request_args, pagination_params): - """Get all network interfaces.""" - interfaces_obj, link_params = dbapi.network_interfaces_get_all( - context, request_args, pagination_params, - ) - links = base.links_from(link_params) - response_body = {'network_interfaces': interfaces_obj, 'links': links} - return jsonutils.to_primitive(response_body), 200, None - - def post(self, context, request_data): - """Create a new network interface.""" - json = util.copy_project_id_into_json(context, request_data) - obj = dbapi.network_interfaces_create(context, json) - interface = jsonutils.to_primitive(obj) - - location = v1.api.url_for( - NetworkInterfaceById, id=obj.id, _external=True - ) - headers = {'Location': location} - - return interface, 201, headers - - -class NetworkInterfaceById(base.Resource): - - def get(self, context, id): - """Get network interface by given id""" - obj = dbapi.network_interfaces_get_by_id(context, id) - interface = jsonutils.to_primitive(obj) - interface['variables'] = jsonutils.to_primitive(obj.variables) - return interface, 200, None - - def put(self, context, id, request_data): - """Update existing network interface values.""" - net_obj = dbapi.network_interfaces_update(context, id, request_data) - return jsonutils.to_primitive(net_obj), 200, None - - def delete(self, context, id): - """Delete existing network interface.""" - dbapi.network_interfaces_delete(context, id) - return None, 204, None diff --git a/craton/api/v1/resources/inventory/regions.py 
b/craton/api/v1/resources/inventory/regions.py deleted file mode 100644 index 68e36de..0000000 --- a/craton/api/v1/resources/inventory/regions.py +++ /dev/null @@ -1,81 +0,0 @@ -from oslo_serialization import jsonutils -from oslo_log import log - -from craton.api import v1 -from craton.api.v1 import base -from craton.api.v1.resources import utils -from craton import db as dbapi -from craton import util - - -LOG = log.getLogger(__name__) - - -class Regions(base.Resource): - - @base.pagination_context - def get(self, context, request_args, pagination_params): - """Get region(s) for the project. Get region details if - for a particular region. - """ - region_id = request_args.get("id") - region_name = request_args.get("name") - details = request_args.get("details") - - if not (region_id or region_name): - # Get all regions for this tenant - regions_obj, link_params = dbapi.regions_get_all( - context, request_args, pagination_params, - ) - if details: - regions_obj = [utils.get_resource_with_vars(request_args, r) - for r in regions_obj] - else: - if region_name: - region_obj = dbapi.regions_get_by_name(context, region_name) - region_obj.data = region_obj.variables - - if region_id: - region_obj = dbapi.regions_get_by_id(context, region_id) - region_obj.data = region_obj.variables - - regions_obj = [region_obj] - link_params = {} - links = base.links_from(link_params) - response_body = {'regions': regions_obj, 'links': links} - return jsonutils.to_primitive(response_body), 200, None - - def post(self, context, request_data): - """Create a new region.""" - json = util.copy_project_id_into_json(context, request_data) - region_obj = dbapi.regions_create(context, json) - region = jsonutils.to_primitive(region_obj) - if 'variables' in json: - region["variables"] = jsonutils.to_primitive(region_obj.variables) - else: - region["variables"] = {} - - location = v1.api.url_for( - RegionsById, id=region_obj.id, _external=True - ) - headers = {'Location': location} - - return 
region, 201, headers - - -class RegionsById(base.Resource): - - def get(self, context, id, request_args): - region_obj = dbapi.regions_get_by_id(context, id) - region = utils.get_resource_with_vars(request_args, region_obj) - return region, 200, None - - def put(self, context, id, request_data): - """Update existing region.""" - region_obj = dbapi.regions_update(context, id, request_data) - return jsonutils.to_primitive(region_obj), 200, None - - def delete(self, context, id): - """Delete existing region.""" - dbapi.regions_delete(context, id) - return None, 204, None diff --git a/craton/api/v1/resources/projects.py b/craton/api/v1/resources/projects.py deleted file mode 100644 index 9f2236b..0000000 --- a/craton/api/v1/resources/projects.py +++ /dev/null @@ -1,67 +0,0 @@ -from oslo_serialization import jsonutils -from oslo_log import log - -from craton.api import v1 -from craton.api.v1 import base -from craton.api.v1.resources import utils -from craton import db as dbapi - - -LOG = log.getLogger(__name__) - - -class Projects(base.Resource): - - @base.pagination_context - def get(self, context, request_args, pagination_params): - """Get all projects. Requires super admin privileges.""" - project_name = request_args["name"] - details = request_args.get("details") - - if project_name: - projects_obj, link_params = dbapi.projects_get_by_name( - context, project_name, request_args, pagination_params, - ) - else: - projects_obj, link_params = dbapi.projects_get_all( - context, request_args, pagination_params, - ) - if details: - projects_obj = [utils.get_resource_with_vars(request_args, p) - for p in projects_obj] - - links = base.links_from(link_params) - response_body = {'projects': projects_obj, 'links': links} - return jsonutils.to_primitive(response_body), 200, None - - def post(self, context, request_data): - """Create a new project. 
Requires super admin privileges.""" - project_obj = dbapi.projects_create(context, request_data) - - location = v1.api.url_for( - ProjectById, id=project_obj.id, _external=True - ) - headers = {'Location': location} - - project = jsonutils.to_primitive(project_obj) - if 'variables' in request_data: - project["variables"] = \ - jsonutils.to_primitive(project_obj.variables) - else: - project["variables"] = {} - return project, 201, headers - - -class ProjectById(base.Resource): - - def get(self, context, id): - """Get a project details by id. Requires super admin privileges.""" - project_obj = dbapi.projects_get_by_id(context, id) - project = jsonutils.to_primitive(project_obj) - project['variables'] = jsonutils.to_primitive(project_obj.variables) - return project, 200, None - - def delete(self, context, id): - """Delete existing project. Requires super admin privileges.""" - dbapi.projects_delete(context, id) - return None, 204, None diff --git a/craton/api/v1/resources/users.py b/craton/api/v1/resources/users.py deleted file mode 100644 index 49ce8be..0000000 --- a/craton/api/v1/resources/users.py +++ /dev/null @@ -1,68 +0,0 @@ -from oslo_serialization import jsonutils -from oslo_log import log -from oslo_utils import uuidutils - -from craton.api import v1 -from craton.api.v1 import base -from craton import db as dbapi - - -LOG = log.getLogger(__name__) - - -class Users(base.Resource): - - @base.pagination_context - def get(self, context, request_args, pagination_params): - """Get all users. 
Requires project admin privileges.""" - user_id = request_args["id"] - user_name = request_args["name"] - - if user_id: - user_obj = dbapi.users_get_by_id(context, user_id) - user_obj.data = user_obj.variables - users_obj = [user_obj] - link_params = {} - - if user_name: - users_obj, link_params = dbapi.users_get_by_name( - context, user_name, request_args, pagination_params, - ) - else: - users_obj, link_params = dbapi.users_get_all( - context, request_args, pagination_params, - ) - links = base.links_from(link_params) - response_body = {'users': users_obj, 'links': links} - return jsonutils.to_primitive(response_body), 200, None - - def post(self, context, request_data): - """Create a new user. Requires project admin privileges.""" - # NOTE(sulo): Instead of using context project_id from - # header, here we always ensure, user create gets project_id - # from request param. - project_id = request_data["project_id"] - dbapi.projects_get_by_id(context, project_id) - api_key = uuidutils.generate_uuid() - request_data["api_key"] = api_key - user_obj = dbapi.users_create(context, request_data) - - location = v1.api.url_for( - UserById, id=user_obj.id, _external=True - ) - headers = {'Location': location} - - return jsonutils.to_primitive(user_obj), 201, headers - - -class UserById(base.Resource): - - def get(self, context, id): - """Get a user details by id. Requires project admin privileges.""" - user_obj = dbapi.users_get_by_id(context, id) - return jsonutils.to_primitive(user_obj), 200, None - - def delete(self, context, id): - """Delete existing user. 
Requires project admin privileges.""" - dbapi.users_delete(context, id) - return None, 204, None diff --git a/craton/api/v1/resources/utils.py b/craton/api/v1/resources/utils.py deleted file mode 100644 index 31c4f82..0000000 --- a/craton/api/v1/resources/utils.py +++ /dev/null @@ -1,69 +0,0 @@ -import binascii -import os -from flask import url_for -from oslo_serialization import jsonutils - -from craton import db as dbapi - - -def format_variables(args, obj): - """Update resource response with requested type of variables.""" - if args: - resolved_values = args.get("resolved-values", None) - else: - resolved_values = None - - if resolved_values: - obj.vars = obj.resolved - else: - obj.vars = obj.variables - return obj - - -def get_resource_with_vars(args, obj): - """Get resource in json primitive with variables.""" - obj = format_variables(args, obj) - res = jsonutils.to_primitive(obj) - res['variables'] = jsonutils.to_primitive(obj.vars) - return res - - -def get_device_type(context, device_id): - device = dbapi.resource_get_by_id(context, "devices", device_id) - return device.type - - -def get_resource_url(resource_type, resource_id): - resources = { - "cells": "v1.cells_id", - "hosts": "v1.hosts_id", - "network_devices": "v1.network_devices_id", - "regions": "v1.regions_id", - } - return url_for(resources[resource_type], id=resource_id, _external=True) - - -def add_up_link(context, device): - if device["parent_id"]: - device_type = get_device_type(context, device["parent_id"]) - link_url = get_resource_url(device_type, device["parent_id"]) - elif device["cell_id"]: - link_url = get_resource_url("cells", device["cell_id"]) - else: - link_url = get_resource_url("regions", device["region_id"]) - - link = { - "href": link_url, - "rel": "up", - } - - links = device.setdefault("links", []) - links.append(link) - - -def gen_api_key(): - """Generates crypto strong 16 bytes api key.""" - # NOTE(sulo): this implementation is taken from secrets - # moudule available in 
python 3.6 - tbytes = os.urandom(16) - return binascii.hexlify(tbytes).decode('ascii') diff --git a/craton/api/v1/resources/variables.py b/craton/api/v1/resources/variables.py deleted file mode 100644 index 526e78c..0000000 --- a/craton/api/v1/resources/variables.py +++ /dev/null @@ -1,43 +0,0 @@ -from oslo_serialization import jsonutils -from oslo_log import log - - -from craton.api.v1 import base -from craton.api.v1.resources import utils -from craton import db as dbapi - - -# NOTE(thomasem): LOG must exist for craton.api.v1.base module to introspect -# and execute this modules LOG. -LOG = log.getLogger(__name__) - - -class Variables(base.Resource): - - def get(self, context, resources, id, request_args=None): - """Get variables for given resource.""" - obj = dbapi.resource_get_by_id(context, resources, id) - obj = utils.format_variables(request_args, obj) - resp = {"variables": jsonutils.to_primitive(obj.vars)} - return resp, 200, None - - def put(self, context, resources, id, request_data): - """ - Update existing resource variables, or create if it does - not exist. - """ - obj = dbapi.variables_update_by_resource_id( - context, resources, id, request_data - ) - resp = {"variables": jsonutils.to_primitive(obj.variables)} - return resp, 200, None - - def delete(self, context, resources, id, request_data): - """Delete resource variables.""" - # NOTE(sulo): this is not that great. Find a better way to do this. - # We can pass multiple keys suchs as key1=one key2=two etc. but not - # the best way to do this. 
- dbapi.variables_delete_by_resource_id( - context, resources, id, request_data - ) - return None, 204, None diff --git a/craton/api/v1/routes.py b/craton/api/v1/routes.py deleted file mode 100644 index 6f777f7..0000000 --- a/craton/api/v1/routes.py +++ /dev/null @@ -1,93 +0,0 @@ -from craton.api.v1.resources import users -from craton.api.v1.resources import projects -from craton.api.v1.resources import variables - -from craton.api.v1.resources.inventory import ansible_inventory -from craton.api.v1.resources.inventory import cells -from craton.api.v1.resources.inventory import clouds -from craton.api.v1.resources.inventory import devices -from craton.api.v1.resources.inventory import hosts -from craton.api.v1.resources.inventory import regions -from craton.api.v1.resources.inventory import networks - - -VARS_RESOLVE = ", ".join(map(repr, ("hosts", ))) -VARS_NOT_RESOLVE = ", ".join( - map(repr, ("network-devices", "cells", "regions", "networks", "projects", - "clouds")) -) - -routes = [ - dict(resource=ansible_inventory.AnsibleInventory, - urls=['/ansible-inventory'], - endpoint='ansible_inventory'), - dict(resource=devices.Devices, - urls=['/devices'], - endpoint='devices'), - dict(resource=hosts.HostsLabels, - urls=['/hosts//labels'], - endpoint='hosts_labels'), - dict(resource=hosts.HostById, - urls=['/hosts/'], - endpoint='hosts_id'), - dict(resource=hosts.Hosts, - urls=['/hosts'], - endpoint='hosts'), - dict(resource=regions.Regions, - urls=['/regions'], - endpoint='regions'), - dict(resource=regions.RegionsById, - urls=['/regions/'], - endpoint='regions_id'), - dict(resource=clouds.Clouds, - urls=['/clouds'], - endpoint='clouds'), - dict(resource=clouds.CloudsById, - urls=['/clouds/'], - endpoint='clouds_id'), - dict(resource=cells.CellById, - urls=['/cells/'], - endpoint='cells_id'), - dict(resource=cells.Cells, - urls=['/cells'], - endpoint='cells'), - dict(resource=projects.Projects, - urls=['/projects'], - endpoint='projects'), - 
dict(resource=projects.ProjectById, - urls=['/projects/'], - endpoint='projects_id'), - dict(resource=users.Users, - urls=['/users'], - endpoint='users'), - dict(resource=users.UserById, - urls=['/users/'], - endpoint='users_id'), - dict(resource=networks.Networks, - urls=['/networks'], - endpoint='networks'), - dict(resource=networks.NetworkById, - urls=['/networks/'], - endpoint='networks_id'), - dict(resource=networks.NetworkInterfaces, - urls=['/network-interfaces'], - endpoint='network_interfaces'), - dict(resource=networks.NetworkInterfaceById, - urls=['/network-interfaces/'], - endpoint='network_interfaces_id'), - dict(resource=networks.NetworkDevices, - urls=['/network-devices'], - endpoint='network_devices'), - dict(resource=networks.NetworkDeviceById, - urls=['/network-devices/'], - endpoint='network_devices_id'), - dict(resource=networks.NetworkDeviceLabels, - urls=['/network-devices//labels'], - endpoint='network_devices_labels'), - dict(resource=variables.Variables, - urls=['///variables'.format(VARS_RESOLVE)], - endpoint='variables_with_resolve'), - dict(resource=variables.Variables, - urls=['///variables'.format(VARS_NOT_RESOLVE)], - endpoint='variables_without_resolve'), -] diff --git a/craton/api/v1/schemas.py b/craton/api/v1/schemas.py deleted file mode 100644 index b6f53f6..0000000 --- a/craton/api/v1/schemas.py +++ /dev/null @@ -1,1854 +0,0 @@ -import copy - -DefinitionVariablesSource = { - "type": "object", - "additionalProperties": False, - "patternProperties": { - "^.+": { - "anyOf": [ - { - "type": "string", - }, - { - "type": "null", - }, - { - "type": "number", - }, - { - "type": "boolean", - }, - { - "type": "integer", - }, - { - "type": "array", - }, - { - "type": "object", - }, - ], - }, - }, -} - -DefinitionDeleteVariables = { - "type": "array", - "items": {"type": "string"}, -} - -DefinitionLinks = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "required": [ - "href", - "rel", - ], - 
"properties": { - "href": { - "type": "string", - }, - "rel": { - "type": "string", - } - } - } -} - -# These are properties that should be excluded in any POST call -# such that a resource can not be created with these in request body. -blacklisted_create_properties = ["id", "created_at", "updated_at"] - -# Blacklisted create properties with project_id addition -blacklisted_with_project_id = blacklisted_create_properties + ["project_id"] - - -def _remove_properties(properties, remove_list): - props = copy.copy(properties) - for prop in remove_list: - props.pop(prop) - return props - -DefinitionsLabel = { - "type": "object", - "additionalProperties": False, - "properties": { - "labels": { - "type": "array", - "items": { - "type": "string", - }, - }, - }, -} - -DefinitionsError = { - "type": "object", - "additionalProperties": False, - "properties": { - "fields": { - "type": "string", - }, - "message": { - "type": "string", - }, - "code": { - "type": "integer", - "format": "int32", - }, - }, -} - -HostProperties = { - "created_at": { - "type": "string", - }, - "updated_at": { - "type": "string", - }, - "active": { - "type": "boolean", - }, - "note": { - "type": "string", - }, - "ip_address": { - "type": "string", - }, - "name": { - "type": "string", - }, - "id": { - "type": "integer", - }, - "cell_id": { - "type": "integer", - }, - "project_id": { - "type": "string", - }, - "parent_id": { - "type": "integer", - "description": "Parent Id of this host", - }, - "device_type": { - "type": "string", - "description": "Type of host", - }, - "labels": { - "type": "array", - "items": { - "type": "string", - }, - "description": "User defined labels", - }, - "region_id": { - "type": "integer", - }, - "cloud_id": { - "type": "integer", - }, - "variables": DefinitionVariablesSource, - "links": DefinitionLinks, -} - -DefinitionsHost = { - "required": [ - "name", - "cloud_id", - "region_id", - "ip_address", - "device_type", - ], - "type": "object", - "additionalProperties": False, 
- "properties": HostProperties, -} - -DefinitionsHostId = { - "type": "object", - "additionalProperties": False, - "properties": HostProperties, -} - -DefinitionHostCreate = { - "required": [ - "name", - "cloud_id", - "region_id", - "ip_address", - "device_type", - ], - "type": "object", - "additionalProperties": False, - "properties": _remove_properties(HostProperties, - blacklisted_with_project_id), -} - -CellProperties = { - "created_at": { - "type": "string", - }, - "updated_at": { - "type": "string", - }, - "note": { - "type": "string", - }, - "name": { - "type": "string", - }, - "region_id": { - "type": "integer", - }, - "cloud_id": { - "type": "integer", - }, - "project_id": { - "type": "string", - }, - "id": { - "type": "integer", - "description": "Unique ID of the cell", - }, - "variables": DefinitionVariablesSource, -} - -DefinitionsCell = { - "required": [ - "name", - "cloud_id", - "region_id", - ], - "type": "object", - "additionalProperties": False, - "properties": CellProperties, -} - -DefinitionsCellId = { - "type": "object", - "additionalProperties": False, - "properties": CellProperties, -} - -DefinitionsCellCreate = { - "required": [ - "name", - "cloud_id", - "region_id", - ], - "type": "object", - "additionalProperties": False, - "properties": _remove_properties(CellProperties, - blacklisted_with_project_id), -} - -RegionProperties = { - "created_at": { - "type": "string", - }, - "updated_at": { - "type": "string", - }, - "note": { - "type": "string", - "description": "Region Note", - }, - "name": { - "type": "string", - "description": "Region Name", - }, - "cells": { - "items": DefinitionsCell, - "type": "array", - "description": "List of cells in this region", - }, - "project_id": { - "type": "string", - }, - "cloud_id": { - "type": "integer", - }, - "id": { - "type": "integer", - "description": "Unique ID for the region", - }, - "variables": DefinitionVariablesSource, -} - -DefinitionsRegion = { - "required": [ - "name", - "cloud_id", - ], - 
"type": "object", - "additionalProperties": False, - "properties": RegionProperties, -} - -DefinitionsRegionId = { - "type": "object", - "additionalProperties": False, - "properties": RegionProperties, -} - -DefinitionsRegionCreate = { - "required": [ - "name", - "cloud_id", - ], - "type": "object", - "additionalProperties": False, - "properties": _remove_properties(RegionProperties, - blacklisted_with_project_id), -} - -CloudProperties = { - "created_at": { - "type": "string", - }, - "updated_at": { - "type": "string", - }, - "note": { - "type": "string", - "description": "Cloud Note", - }, - "name": { - "type": "string", - "description": "Cloud Name", - }, - "regions": { - "items": DefinitionsRegion, - "type": "array", - "description": "List of regions in this cloud", - }, - "project_id": { - "type": "string", - }, - "id": { - "type": "integer", - "description": "Unique ID for the cloud", - }, - "variables": DefinitionVariablesSource, -} - -DefinitionsCloud = { - "required": [ - "name", - ], - "type": "object", - "additionalProperties": False, - "properties": CloudProperties, -} - -DefinitionsCloudId = { - "type": "object", - "additionalProperties": False, - "properties": CloudProperties, -} - -DefinitionsCloudCreate = { - "required": [ - "name", - ], - "type": "object", - "additionalProperties": False, - "properties": _remove_properties(CloudProperties, - blacklisted_with_project_id), -} - -UserProperties = { - "created_at": { - "type": "string", - }, - "updated_at": { - "type": "string", - }, - "id": { - "type": "integer", - }, - "api_key": { - "type": "string", - }, - "username": { - "type": "string", - }, - "is_admin": { - "type": "boolean", - }, - "project_id": { - "type": "string", - }, - "roles": { - "type": "array", - "items": { - "type": "string", - }, - }, -} - -DefinitionUser = { - "type": "object", - "additionalProperties": False, - "properties": UserProperties, -} - -DefinitionUserCreate = { - "required": [ - "username", - "project_id", - ], - 
"type": "object", - "additionalProperties": False, - "properties": _remove_properties(UserProperties, - blacklisted_create_properties), -} - -ProjectProperties = { - "created_at": { - "type": "string", - }, - "updated_at": { - "type": "string", - }, - "id": { - "type": "string", - }, - "name": { - "type": "string", - }, - "variables": DefinitionVariablesSource, -} - -DefinitionProject = { - "type": "object", - "additionalProperties": False, - "properties": ProjectProperties, -} - -DefinitionProjectCreate = { - "required": [ - "name", - ], - "type": "object", - "additionalProperties": False, - "properties": _remove_properties(ProjectProperties, - blacklisted_create_properties), -} - -NetworkProperties = { - "created_at": { - "type": "string", - }, - "updated_at": { - "type": "string", - }, - "id": { - "type": "integer", - }, - "region_id": { - "type": "integer", - }, - "cloud_id": { - "type": "integer", - }, - "cell_id": { - "type": "integer", - }, - "project_id": { - "type": "string", - }, - "name": { - "type": "string", - }, - "cidr": { - "type": "string", - }, - "gateway": { - "type": "string", - }, - "netmask": { - "type": "string", - }, - "ip_block_type": { - "type": "string", - }, - "nss": { - "type": "string", - }, - "variables": DefinitionVariablesSource, -} - -DefinitionNetwork = { - "required": [ - "name", - "cidr", - "gateway", - "netmask", - "cloud_id", - "region_id", - ], - "type": "object", - "additionalProperties": False, - "properties": NetworkProperties, -} - -DefinitionNetworkId = { - "type": "object", - "additionalProperties": False, - "properties": NetworkProperties, -} - -DefinitionNetworkCreate = { - "required": [ - "name", - "cidr", - "gateway", - "netmask", - "cloud_id", - "region_id", - ], - "type": "object", - "additionalProperties": False, - "properties": _remove_properties(NetworkProperties, - blacklisted_with_project_id), -} - - -NetworkInterfaceProperties = { - "created_at": { - "type": "string", - }, - "updated_at": { - "type": 
"string", - }, - "id": { - "type": "integer", - }, - "name": { - "type": "string", - }, - "device_id": { - "type": "integer", - "default": None, - }, - "network_id": { - "type": "integer", - "default": None, - }, - "interface_type": { - "type": "string", - }, - "project_id": { - "type": "string", - }, - "vlan_id": { - "type": "integer", - }, - "vlan": { - "type": "string", - }, - "port": { - "type": "integer", - }, - "duplex": { - "type": "string", - }, - "speed": { - "type": "integer", - }, - "link": { - "type": "string", - }, - "cdp": { - "type": "string", - }, - "security": { - "type": "string", - }, - "ip_address": { - "type": "string", - }, - "variables": DefinitionVariablesSource, -} - -DefinitionNetworkInterface = { - "required": [ - "name", - "device_id", - "interface_type", - "ip_address", - ], - "type": "object", - "additionalProperties": False, - "properties": NetworkInterfaceProperties, -} - -DefinitionNetworkInterfaceId = { - "type": "object", - "additionalProperties": False, - "properties": NetworkInterfaceProperties, -} - - -DefinitionNetworkInterfaceCreate = { - "required": [ - "name", - "device_id", - "interface_type", - "ip_address", - ], - "type": "object", - "additionalProperties": False, - "properties": _remove_properties(NetworkInterfaceProperties, - blacklisted_with_project_id), -} - -NetworkDeviceProperties = { - "created_at": { - "type": "string", - }, - "updated_at": { - "type": "string", - }, - "id": { - "type": "integer", - }, - "region_id": { - "type": "integer", - }, - "cloud_id": { - "type": "integer", - }, - "cell_id": { - "type": "integer", - }, - "parent_id": { - "type": "integer", - }, - "project_id": { - "type": "string", - }, - "ip_address": { - "type": "string", - }, - "device_type": { - "type": "string", - }, - "active": { - "type": "boolean", - }, - "name": { - "type": "string", - }, - "access_secret_id": { - "type": "integer", - }, - "model_name": { - "type": "string", - }, - "os_version": { - "type": "string", - }, - 
"vlans": { - "type": "string", - }, - "interface_id": { - "type": "integer", - }, - "network_id": { - "type": "integer", - }, - "variables": DefinitionVariablesSource, - "links": DefinitionLinks, -} - -DefinitionNetworkDevice = { - "required": [ - "name", - "cloud_id", - "region_id", - "device_type", - "ip_address", - ], - "type": "object", - "additionalProperties": False, - "properties": NetworkDeviceProperties, -} - -DefinitionNetworkDeviceId = { - "type": "object", - "additionalProperties": False, - "properties": NetworkDeviceProperties, -} - -DefinitionNetworkDeviceCreate = { - "required": [ - "name", - "cloud_id", - "region_id", - "device_type", - "ip_address", - ], - "type": "object", - "additionalProperties": False, - "properties": _remove_properties(NetworkDeviceProperties, - blacklisted_with_project_id), -} - -DefinitionNoParams = { - "type": "object", - "properties": {}, - "maxProperties": 0, - "additionalProperties": False, -} - -DefinitionsPaginationLinks = { - "type": "array", - "items": { - "type": "object", - "properties": { - "rel": { - "type": "string", - "enum": ["first", "prev", "self", "next"], - "description": ("Relation of the associated URL to the current" - " page"), - }, - "href": { - "type": "string", - }, - }, - }, -} - - -def add_pagination_args(resource, args, - minimum_page_size=10, - default_page_size=30, - maximum_page_size=100, - marker_type="integer"): - args.update({ - "limit": { - "minimum": minimum_page_size, - "default": default_page_size, - "maximum": maximum_page_size, - "type": "integer", - "description": "Number of {}s to return in a page".format( - resource, - ), - }, - "marker": { - "type": marker_type, - "description": "Last {} ID of the previous page".format( - resource, - ), - }, - "sort_dir": { - "type": "string", - "enum": ["asc", "desc"], - "description": ("Direction to sort the {}s based on keys " - "specified to sort on.").format(resource), - }, - "sort_keys": { - "type": "string", - "description": "Keys used to 
sort the {}s by.".format(resource), - }, - }) - return args - - -def paginated_resource(list_name, schema): - return { - "type": "object", - "additionalProperties": False, - "properties": { - list_name: { - "type": "array", - "items": schema, - }, - "links": DefinitionsPaginationLinks, - }, - } - - -DefinitionDevicesPaginated = { - "type": "object", - "additionalProperties": False, - "properties": { - "devices": { - "type": "object", - "properties": { - "hosts": { - "type": "array", - "items": DefinitionsHost, - }, - "network-devices": { - "type": "array", - "items": DefinitionNetworkDeviceId, - }, - }, - }, - "links": DefinitionsPaginationLinks, - }, -} - -validators = { - ("ansible_inventory", "GET"): { - "args": { - "additionalProperties": False, - "properties": { - "region_id": { - "default": None, - "type": "string", - "description": "Region to generate inventory for", - }, - "cell_id": { - "default": None, - "type": "string", - "description": "Cell id to generate inventory for", - }, - }, - }, - }, - ("devices", "GET"): { - "args": { - "type": "object", - "additionalProperties": False, - "properties": add_pagination_args("devices", { - "region_id": { - "type": "integer", - }, - "cloud_id": { - "type": "integer", - }, - "cell_id": { - "type": "integer", - }, - "parent_id": { - "type": "integer", - }, - "active": { - "type": "boolean", - }, - "descendants": { - "default": False, - "type": "boolean", - }, - "resolved-values": { - "default": True, - "type": "boolean", - }, - "details": { - "default": False, - "type": "boolean", - }, - }), - }, - }, - ("hosts_labels", "PUT"): { - "json": DefinitionsLabel, - }, - ("hosts_labels", "GET"): { - "args": DefinitionNoParams, - }, - ("hosts_labels", "DELETE"): { - "json": DefinitionsLabel, - }, - ("hosts_id", "DELETE"): { - }, - ("hosts_id", "GET"): { - "args": { - "additionalProperties": False, - "properties": { - "resolved-values": { - "default": True, - "type": "boolean", - }, - }, - }, - }, - ("hosts_id", "PUT"): { - 
"json": { - "additionalProperties": False, - "properties": { - "active": { - "type": "boolean", - }, - "note": { - "type": "string", - }, - "ip_address": { - "type": "string", - }, - "name": { - "type": "string", - }, - "device_type": { - "type": "string", - "description": "Type of host", - }, - "parent_id": { - "anyOf": [ - { - "type": "integer", - }, - { - "type": "null", - }, - ], - "description": "Parent Id of this host", - }, - }, - }, - }, - ("regions", "GET"): { - "args": { - "additionalProperties": False, - "properties": add_pagination_args("region", { - "name": { - "type": "string", - "description": "name of the region to get", - }, - "details": { - "type": "boolean", - "description": "get detailed information" - }, - "cloud_id": { - "type": "integer", - "description": "ID of the cloud to get regions", - }, - "vars": { - "type": "string", - "description": "variable filters to get a region", - }, - "id": { - "type": "integer", - "description": "ID of the region to get", - }, - "resolved-values": { - "default": True, - "type": "boolean", - }, - }), - }, - }, - ("regions", "POST"): { - "json": DefinitionsRegionCreate, - }, - ("clouds", "GET"): { - "args": { - "additionalProperties": False, - "properties": add_pagination_args("cloud", { - "name": { - "type": "string", - "description": "name of the cloud to get", - }, - "vars": { - "type": "string", - "description": "variable filters to get a cloud", - }, - "id": { - "type": "integer", - "description": "ID of the cloud to get", - }, - "details": { - "default": False, - "type": "boolean", - }, - }), - }, - }, - ("clouds", "POST"): { - "json": DefinitionsCloudCreate, - }, - ("hosts", "POST"): { - "json": DefinitionHostCreate, - }, - ("hosts", "GET"): { - "args": { - "additionalProperties": False, - "properties": add_pagination_args("host", { - "name": { - "type": "string", - "description": "name of the hosts to get", - }, - "details": { - "type": "boolean", - "description": "get detailed information", - }, - 
"region_id": { - "type": "integer", - "description": "ID of the region to get hosts", - }, - "cloud_id": { - "type": "integer", - "description": "ID of the cloud to get hosts", - }, - "cell_id": { - "type": "integer", - "description": "ID of the cell to get hosts", - }, - "device_type": { - "type": "string", - "description": "Type of host to get", - }, - "label": { - "type": "string", - "description": "label to get host by", - }, - "ip_address": { - "type": "string", - "description": "ip_address of the hosts to get", - }, - "vars": { - "type": "string", - "description": "variable filters to get a host", - }, - "id": { - "type": "integer", - "description": "ID of host to get", - }, - "resolved-values": { - "default": True, - "type": "boolean", - }, - }), - }, - }, - ("cells_id", "DELETE"): { - }, - ("cells_id", "GET"): { - "args": { - "additionalProperties": False, - "properties": { - "resolved-values": { - "default": True, - "type": "boolean", - }, - }, - }, - }, - ("cells_id", "PUT"): { - "json": { - "additionalProperties": False, - "properties": { - "note": { - "type": "string", - }, - "name": { - "type": "string", - }, - }, - }, - }, - ("cells", "POST"): { - "json": DefinitionsCellCreate, - }, - ("cells", "GET"): { - "args": { - "additionalProperties": False, - "properties": add_pagination_args("cell", { - "region_id": { - "type": "string", - "description": "name of the region to get cells for", - }, - "cloud_id": { - "type": "integer", - "description": "ID of the cloud to get cells", - }, - "id": { - "type": "integer", - "description": "id of the cell to get", - }, - "vars": { - "type": "string", - "description": "variable filters to get a cell", - }, - "details": { - "type": "boolean", - "description": "get detailed information", - }, - "name": { - "type": "string", - "description": "name of the cell to get", - }, - "resolved-values": { - "default": True, - "type": "boolean", - }, - }), - }, - }, - ("regions_id", "DELETE"): { - }, - ("regions_id", "GET"): { - 
"args": { - "additionalProperties": False, - "properties": { - "resolved-values": { - "default": True, - "type": "boolean", - }, - }, - }, - }, - ("regions_id", "PUT"): { - "json": { - "additionalProperties": False, - "properties": { - "name": { - "type": "string", - }, - "note": { - "type": "string", - }, - }, - }, - }, - ("clouds_id", "DELETE"): { - }, - ("clouds_id", "GET"): { - "args": DefinitionNoParams, - }, - ("clouds_id", "PUT"): { - "json": { - "additionalProperties": False, - "properties": { - "name": { - "type": "string", - }, - "note": { - "type": "string", - }, - }, - }, - }, - ("projects", "GET"): { - "args": { - "additionalProperties": False, - "properties": add_pagination_args("project", { - "name": { - "default": None, - "type": "string", - "description": "name of the project to get", - }, - "vars": { - "type": "string", - "description": "variable filters to get a project", - }, - "details": { - "default": False, - "type": "boolean", - }, - }, marker_type="string"), - }, - }, - ("projects", "POST"): { - "json": DefinitionProjectCreate, - }, - ("projects_id", "DELETE"): { - }, - ("projects_id", "GET"): { - "args": DefinitionNoParams, - }, - ("users", "GET"): { - "args": { - "additionalProperties": False, - "properties": add_pagination_args("user", { - "id": { - "default": None, - "type": "integer", - "description": "id of the user to get", - }, - "name": { - "default": None, - "type": "string", - "description": "name of the user to get", - }, - }), - }, - }, - ("users", "POST"): { - "json": DefinitionUserCreate, - }, - ("users_id", "DELETE"): { - }, - ("users_id", "GET"): { - "args": DefinitionNoParams, - }, - ("network_devices", "GET"): { - "args": { - "additionalProperties": False, - "properties": add_pagination_args("network device", { - "id": { - "type": "integer", - "description": "id of the net device to get", - }, - "ip_address": { - "type": "string", - "description": "IP of the device to get", - }, - "region_id": { - "type": "string", - 
"description": "region id of the device to get", - }, - "cloud_id": { - "type": "integer", - "description": "ID of the cloud to get devices", - }, - "name": { - "type": "string", - "description": "name of the device to get", - }, - "details": { - "type": "boolean", - "description": "get detailed information", - }, - "device_type": { - "type": "string", - "description": "type of the device to get", - }, - "vars": { - "type": "string", - "description": "variable filters to get device", - }, - "cell_id": { - "type": "string", - "description": "cell id of the device to get", - }, - "resolved-values": { - "default": True, - "type": "boolean", - }, - }), - }, - }, - ("network_devices_id", "DELETE"): { - }, - ("network_devices_id", "GET"): { - "args": { - "additionalProperties": False, - "properties": { - "resolved-values": { - "default": True, - "type": "boolean", - }, - }, - }, - }, - ("networks_id", "DELETE"): { - }, - ("networks_id", "GET"): { - "args": DefinitionNoParams, - }, - ("networks_id", "PUT"): { - "json": { - "additionalProperties": False, - "properties": { - "name": { - "type": "string", - }, - "cidr": { - "type": "string", - }, - "gateway": { - "type": "string", - }, - "netmask": { - "type": "string", - }, - "ip_block_type": { - "type": "string", - }, - "nss": { - "type": "string", - }, - }, - }, - }, - ("network_devices_id", "PUT"): { - "json": { - "additionalProperties": False, - "properties": { - "ip_address": { - "type": "string", - }, - "device_type": { - "type": "string", - }, - "name": { - "type": "string", - }, - "model_name": { - "type": "string", - }, - "os_version": { - "type": "string", - }, - "vlans": { - "type": "string", - }, - "parent_id": { - "anyOf": [ - { - "type": "integer", - }, - { - "type": "null", - }, - ], - }, - }, - }, - }, - ("network_devices", "POST"): { - "json": DefinitionNetworkDeviceCreate, - }, - ("network_devices_labels", "DELETE"): { - "json": DefinitionsLabel, - }, - ("network_devices_labels", "GET"): { - "args": 
DefinitionNoParams, - }, - ("network_devices_labels", "PUT"): { - "json": DefinitionsLabel, - }, - ("network_interfaces", "GET"): { - "args": { - "additionalProperties": False, - "properties": add_pagination_args("network interface", { - "id": { - "type": "integer", - "description": "id of the net interface to get", - }, - "device_id": { - "type": "integer", - "description": "device id of the interface to get", - }, - "ip_address": { - "type": "string", - "description": "IP of the interface to get", - }, - "interface_type": { - "type": "string", - "description": "Type of the interface to get", - }, - }), - }, - }, - ("network_interfaces", "POST"): { - "json": DefinitionNetworkInterfaceCreate, - }, - ("network_interfaces_id", "DELETE"): { - }, - ("network_interfaces_id", "GET"): { - "args": DefinitionNoParams, - }, - ("network_interfaces_id", "PUT"): { - "json": { - "additionalProperties": False, - "properties": { - "name": { - "type": "string", - }, - "interface_type": { - "type": "string", - }, - "vlan": { - "type": "string", - }, - "port": { - "type": "integer", - }, - "duplex": { - "type": "string", - }, - "speed": { - "type": "integer", - }, - "link": { - "type": "string", - }, - "cdp": { - "type": "string", - }, - "security": { - "type": "string", - }, - }, - }, - }, - ("networks", "GET"): { - "args": { - "additionalProperties": False, - "properties": add_pagination_args("network", { - "id": { - "type": "integer", - "description": "id of the network to get", - }, - "network_type": { - "type": "string", - "description": "type of the network to get", - }, - "name": { - "type": "string", - "description": "name of the network to get", - }, - "region_id": { - "type": "string", - "description": "region id of the network to get", - }, - "vars": { - "type": "string", - "description": "variable filters to get networks", - }, - "cell_id": { - "type": "string", - "description": "cell idof the network to get", - }, - "resolved-values": { - "default": True, - "type": 
"boolean", - }, - "details": { - "default": False, - "type": "boolean", - "description": "get detailed information", - }, - }), - }, - }, - ("networks", "POST"): { - "json": DefinitionNetworkCreate, - }, - ("variables_with_resolve", "DELETE"): { - "json": DefinitionDeleteVariables, - }, - ("variables_with_resolve", "GET"): { - "args": { - "additionalProperties": False, - "properties": { - "resolved-values": { - "default": True, - "type": "boolean", - }, - }, - }, - }, - ("variables_with_resolve", "PUT"): { - "json": DefinitionVariablesSource, - }, - ("variables_without_resolve", "DELETE"): { - "json": DefinitionDeleteVariables, - }, - ("variables_without_resolve", "GET"): { - "args": DefinitionNoParams, - }, - ("variables_without_resolve", "PUT"): { - "json": DefinitionVariablesSource, - }, -} - -filters = { - ("ansible_inventory", "GET"): { - 200: { - "headers": None, - "schema": { - "type": "object", - "additionalProperties": False, - "patternProperties": { - "^.+": { - "anyOf": [ - { - "type": "string", - }, - { - "type": "null", - }, - { - "type": "number", - }, - { - "type": "boolean", - }, - { - "type": "integer", - }, - { - "type": "array", - }, - { - "type": "object", - }, - ], - }, - }, - }, - }, - }, - ("devices", "GET"): { - 200: { - "headers": None, - "schema": DefinitionDevicesPaginated, - }, - }, - ("hosts_id", "GET"): { - 200: { - "headers": None, - "schema": DefinitionsHostId, - }, - }, - ("hosts_id", "PUT"): { - 200: { - "headers": None, - "schema": DefinitionsHostId, - }, - }, - ("hosts_id", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("hosts_labels", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("hosts_labels", "GET"): { - 200: { - "headers": None, - "schema": DefinitionsLabel, - }, - }, - ("hosts_labels", "PUT"): { - 200: { - "headers": None, - "schema": DefinitionsLabel, - }, - }, - ("hosts", "POST"): { - 201: { - "headers": None, - "schema": DefinitionsHost, - }, - }, - ("hosts", "GET"): { - 
200: { - "headers": None, - "schema": paginated_resource("hosts", DefinitionsHost), - }, - }, - ("cells_id", "GET"): { - 200: { - "headers": None, - "schema": DefinitionsCellId, - }, - }, - ("cells_id", "PUT"): { - 200: { - "headers": None, - "schema": DefinitionsCellId, - }, - }, - ("cells_id", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("cells", "POST"): { - 201: { - "headers": None, - "schema": DefinitionsCell, - }, - }, - ("cells", "GET"): { - 200: { - "headers": None, - "schema": paginated_resource("cells", DefinitionsCell), - }, - }, - ("regions", "POST"): { - 201: { - "headers": None, - "schema": DefinitionsRegion, - }, - }, - ("regions", "GET"): { - 200: { - "headers": None, - "schema": paginated_resource("regions", DefinitionsRegion), - }, - }, - ("regions_id", "GET"): { - 200: { - "headers": None, - "schema": DefinitionsRegionId, - }, - }, - ("regions_id", "PUT"): { - 200: { - "headers": None, - "schema": DefinitionsRegionId, - }, - }, - ("regions_id", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("clouds", "POST"): { - 201: { - "headers": None, - "schema": DefinitionsCloud, - }, - }, - ("clouds", "GET"): { - 200: { - "headers": None, - "schema": paginated_resource("clouds", DefinitionsCloud), - }, - }, - ("clouds_id", "GET"): { - 200: { - "headers": None, - "schema": DefinitionsCloudId, - }, - }, - ("clouds_id", "PUT"): { - 200: { - "headers": None, - "schema": DefinitionsCloudId, - }, - }, - ("clouds_id", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("projects", "GET"): { - 200: { - "headers": None, - "schema": paginated_resource("projects", DefinitionProject), - }, - }, - ("projects", "POST"): { - 201: { - "headers": None, - "schema": DefinitionProject, - }, - }, - ("users", "GET"): { - 200: { - "headers": None, - "schema": paginated_resource("users", DefinitionUser), - }, - }, - ("users", "POST"): { - 201: { - "headers": None, - "schema": DefinitionUser, - }, - }, - 
("projects_id", "GET"): { - 200: { - "headers": None, - "schema": DefinitionProject, - }, - }, - ("projects_id", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("users_id", "GET"): { - 200: { - "headers": None, - "schema": DefinitionUser, - }, - }, - ("users_id", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("network_devices", "GET"): { - 200: { - "headers": None, - "schema": paginated_resource("network_devices", - DefinitionNetworkDeviceId), - }, - }, - ("network_devices", "POST"): { - 201: { - "headers": None, - "schema": DefinitionNetworkDeviceId, - }, - }, - ("network_devices_id", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("network_devices_id", "GET"): { - 200: { - "headers": None, - "schema": DefinitionNetworkDeviceId, - }, - }, - ("network_devices_labels", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("network_devices_labels", "GET"): { - 200: { - "headers": None, - "schema": DefinitionsLabel, - }, - }, - ("network_devices_labels", "PUT"): { - 200: { - "headers": None, - "schema": DefinitionsLabel, - }, - }, - ("network_devices_id", "PUT"): { - 200: { - "headers": None, - "schema": DefinitionNetworkDeviceId, - }, - }, - ("networks", "GET"): { - 200: { - "headers": None, - "schema": paginated_resource("networks", DefinitionNetwork), - }, - }, - ("networks", "POST"): { - 201: { - "headers": None, - "schema": DefinitionNetwork, - }, - }, - ("networks_id", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("networks_id", "GET"): { - 200: { - "headers": None, - "schema": DefinitionNetworkId, - }, - }, - ("networks_id", "PUT"): { - 200: { - "headers": None, - "schema": DefinitionNetworkId, - }, - }, - ("network_interfaces", "GET"): { - 200: { - "headers": None, - "schema": paginated_resource("network_interfaces", - DefinitionNetworkInterface), - }, - }, - ("network_interfaces", "POST"): { - 201: { - "headers": None, - "schema": 
DefinitionNetworkInterface, - }, - }, - ("network_interfaces_id", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("network_interfaces_id", "GET"): { - 200: { - "headers": None, - "schema": DefinitionNetworkInterfaceId, - }, - }, - ("network_interfaces_id", "PUT"): { - 200: { - "headers": None, - "schema": DefinitionNetworkInterfaceId, - }, - }, - ("variables_with_resolve", "GET"): { - 200: { - "headers": None, - "schema": { - "type": "object", - "additionalProperties": False, - "properties": { - "variables": DefinitionVariablesSource, - }, - }, - }, - }, - ("variables_with_resolve", "PUT"): { - 200: { - "headers": None, - "schema": { - "type": "object", - "additionalProperties": False, - "properties": { - "variables": DefinitionVariablesSource, - }, - }, - }, - }, - ("variables_with_resolve", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("variables_without_resolve", "DELETE"): { - 204: { - "headers": None, - "schema": None, - }, - }, - ("variables_without_resolve", "GET"): { - 200: { - "headers": None, - "schema": { - "type": "object", - "additionalProperties": False, - "properties": { - "variables": DefinitionVariablesSource, - }, - }, - }, - }, - ("variables_without_resolve", "PUT"): { - 200: { - "headers": None, - "schema": { - "type": "object", - "additionalProperties": False, - "properties": { - "variables": DefinitionVariablesSource, - }, - }, - }, - }, -} diff --git a/craton/api/v1/validators.py b/craton/api/v1/validators.py deleted file mode 100644 index 014105f..0000000 --- a/craton/api/v1/validators.py +++ /dev/null @@ -1,285 +0,0 @@ -# The code is auto generated, your change will be overwritten by -# code generating. 
- -from functools import wraps - -from werkzeug.datastructures import MultiDict, Headers -from flask import request -from jsonschema import Draft4Validator -from oslo_log import log - -from craton.api.v1.schemas import filters -from craton.api.v1.schemas import validators -from craton import db as dbapi -from craton import exceptions - - -LOG = log.getLogger(__name__) - - -def merge_default(schema, value): - # TODO: more types support - type_defaults = { - 'integer': 9573, - 'string': 'something', - 'object': {}, - 'array': [], - 'boolean': False - } - - return normalize(schema, value, type_defaults)[0] - - -def normalize(schema, data, required_defaults=None): - - if required_defaults is None: - required_defaults = {} - errors = [] - - class DataWrapper(object): - - def __init__(self, data): - super(DataWrapper, self).__init__() - self.data = data - - def get(self, key, default=None): - if isinstance(self.data, dict): - return self.data.get(key, default) - if hasattr(self.data, key): - return getattr(self.data, key) - else: - return default - - def has(self, key): - if isinstance(self.data, dict): - return key in self.data - return hasattr(self.data, key) - - def keys(self): - if isinstance(self.data, dict): - return self.data.keys() - return vars(self.data).keys() - - def _normalize_dict(schema, data): - result = {} - if not isinstance(data, DataWrapper): - data = DataWrapper(data) - - for pattern, _schema in (schema.get('patternProperties', {})).items(): - if pattern == "^.+": - for key in data.keys(): - result[key] = _normalize(_schema, data.get(key)) - - for key, _schema in schema.get('properties', {}).items(): - # set default - type_ = _schema.get('type', 'object') - if ('default' not in _schema and - key in schema.get('required', []) and - type_ in required_defaults): - _schema['default'] = required_defaults[type_] - - # get value - if data.has(key): - result[key] = _normalize(_schema, data.get(key)) - elif 'default' in _schema: - result[key] = 
_schema['default'] - elif key in schema.get('required', []): - errors.append(dict(name='property_missing', - message='`%s` is required' % key)) - - for _schema in schema.get('allOf', []): - rs_component = _normalize(_schema, data) - rs_component.update(result) - result = rs_component - - if schema.get('anyOf'): - # In case of anyOf simply return data, since we dont - # care in normalization of the data as long as - # its been verified. - result = data.data - - additional_properties_schema = schema.get('additionalProperties', - False) - if additional_properties_schema: - aproperties_set = set(data.keys()) - set(result.keys()) - for pro in aproperties_set: - result[pro] = _normalize(additional_properties_schema, - data.get(pro)) - - return result - - def _normalize_list(schema, data): - result = [] - if hasattr(data, '__iter__') and not isinstance(data, dict): - for item in data: - result.append(_normalize(schema.get('items'), item)) - elif 'default' in schema: - result = schema['default'] - return result - - def _normalize_default(schema, data): - if data is None: - return schema.get('default') - else: - return data - - def _normalize(schema, data): - if not schema: - return None - funcs = { - 'object': _normalize_dict, - 'array': _normalize_list, - 'default': _normalize_default, - } - type_ = schema.get('type', 'object') - if type_ not in funcs: - type_ = 'default' - - return funcs[type_](schema, data) - - return _normalize(schema, data), errors - - -class FlaskValidatorAdaptor(object): - - def __init__(self, schema): - self.validator = Draft4Validator(schema) - - def type_convert(self, obj): - if obj is None: - return None - if isinstance(obj, (dict, list)) and not isinstance(obj, MultiDict): - return obj - if isinstance(obj, Headers): - obj = MultiDict(obj) - result = dict() - - convert_funs = { - 'integer': lambda v: int(v[0]), - 'boolean': lambda v: v[0].lower() not in ['n', 'no', - 'false', '', '0'], - 'null': lambda v: None, - 'number': lambda v: float(v[0]), 
- 'string': lambda v: v[0] - } - - def convert_array(type_, v): - func = convert_funs.get(type_, lambda v: v[0]) - return [func([i]) for i in v] - - for k, values in obj.lists(): - prop = self.validator.schema['properties'].get(k, {}) - type_ = prop.get('type') - fun = convert_funs.get(type_, lambda v: v[0]) - if type_ == 'array': - item_type = prop.get('items', {}).get('type') - result[k] = convert_array(item_type, values) - else: - result[k] = fun(values) - return result - - def validate(self, value): - value = self.type_convert(value) - errors = sorted(e.message for e in self.validator.iter_errors(value)) - if errors: - msg = "The request included the following errors:\n- {}".format( - "\n- ".join(errors) - ) - raise exceptions.BadRequest(message=msg) - return merge_default(self.validator.schema, value) - - -def request_validate(view): - - @wraps(view) - def wrapper(*args, **kwargs): - endpoint = request.endpoint.partition('.')[-1] - # data - method = request.method - if method == 'HEAD': - method = 'GET' - locations = validators.get((endpoint, method), {}) - data_type = {"json": "request_data", "args": "request_args"} - for location, schema in locations.items(): - value = getattr(request, location, MultiDict()) - validator = FlaskValidatorAdaptor(schema) - result = validator.validate(value) - LOG.info("Validated request %s: %s" % (location, result)) - if schema.get("maxProperties") == 0: - continue - else: - kwargs[data_type[location]] = result - - context = request.environ['context'] - return view(*args, context=context, **kwargs) - - return wrapper - - -def ensure_project_exists(view): - - @wraps(view) - def wrapper(*args, **kwargs): - context = request.environ['context'] - if context.using_keystone: - find_or_create_project(request, context) - return view(*args, **kwargs) - - return wrapper - - -def response_filter(view): - - @wraps(view) - def wrapper(*args, **kwargs): - resp = view(*args, **kwargs) - - endpoint = request.endpoint.partition('.')[-1] - 
method = request.method - if method == 'HEAD': - method = 'GET' - try: - resp_filter = filters[(endpoint, method)] - except KeyError: - LOG.error( - '"(%(endpoint)s, %(method)s)" is not defined in the response ' - 'filters.', - {"endpoint": endpoint, "method": method} - ) - raise exceptions.UnknownException - - body, status, headers = resp - - try: - schemas = resp_filter[status] - except KeyError: - LOG.error( - 'The status code %(status)d is not defined in the response ' - 'filter "(%(endpoint)s, %(method)s)".', - {"status": status, "endpoint": endpoint, "method": method} - ) - raise exceptions.UnknownException - - body, errors = normalize(schemas['schema'], body) - if schemas['headers']: - headers, header_errors = normalize( - {'properties': schemas['headers']}, headers) - errors.extend(header_errors) - if errors: - LOG.error('Expectation Failed: %s', errors) - raise exceptions.UnknownException - - return body, status, headers - return wrapper - - -def find_or_create_project(request, context): - project_id = context.tenant - token_info = context.token_info - try: - dbapi.projects_get_by_id(context, project_id) - except exceptions.NotFound: - LOG.info('Adding Project "%s" to projects table', project_id) - dbapi.projects_create(context, - {'id': project_id, - 'name': token_info['project']['name']}) diff --git a/craton/cmd/README.rst b/craton/cmd/README.rst deleted file mode 100644 index b0e4cfb..0000000 --- a/craton/cmd/README.rst +++ /dev/null @@ -1,3 +0,0 @@ -########## -Craton CLI -########## diff --git a/craton/cmd/__init__.py b/craton/cmd/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/craton/cmd/api.py b/craton/cmd/api.py deleted file mode 100644 index 7712734..0000000 --- a/craton/cmd/api.py +++ /dev/null @@ -1,30 +0,0 @@ -import os -import sys -from wsgiref import simple_server - -from oslo_config import cfg -from oslo_log import log as logging - -from craton import api - -LOG = logging.getLogger(__name__) - -CONF = cfg.CONF - - 
-def main(): - logging.register_options(CONF) - CONF(sys.argv[1:], - project='craton-api', - default_config_files=[]) - logging.setup(CONF, 'craton-api') - - app = api.load_app() - host, port = cfg.CONF.api.host, cfg.CONF.api.port - srv = simple_server.make_server(host, port, app) - LOG.info("Starting API server in PID: %s" % os.getpid()) - srv.serve_forever() - - -if __name__ == "__main__": - main() diff --git a/craton/cmd/dbsync.py b/craton/cmd/dbsync.py deleted file mode 100644 index b570400..0000000 --- a/craton/cmd/dbsync.py +++ /dev/null @@ -1,86 +0,0 @@ -from oslo_config import cfg - -from craton.db.sqlalchemy import migration - - -CONF = cfg.CONF - - -class DBCommand(object): - - def upgrade(self): - migration.upgrade(CONF.command.revision) - - def revision(self): - migration.revision(CONF.command.message, CONF.command.autogenerate) - - def stamp(self): - migration.stamp(CONF.command.revision) - - def version(self): - print(migration.version()) - - def create_schema(self): - migration.create_schema() - - def bootstrap_project(self): - name = 'bootstrap' - project = migration.create_bootstrap_project( - name, - db_uri=CONF.database.connection) - user = migration.create_bootstrap_user( - project.id, - name, - db_uri=CONF.database.connection) - - msg = ("\nProjectId: %s\nUsername: %s\nAPIKey: %s" - % (user.project_id, user.username, user.api_key)) - print(msg) - - -def add_command_parsers(subparsers): - command_object = DBCommand() - - parser = subparsers.add_parser( - 'upgrade', - help=("Upgrade the database schema to the latest version. " - "Optionally, use --revision to specify an alembic revision " - "string to upgrade to.")) - parser.set_defaults(func=command_object.upgrade) - parser.add_argument('--revision', nargs='?') - - parser = subparsers.add_parser('stamp') - parser.add_argument('--revision', nargs='?') - parser.set_defaults(func=command_object.stamp) - - parser = subparsers.add_parser( - 'revision', - help=("Create a new alembic revision. 
" - "Use --message to set the message string.")) - parser.add_argument('-m', '--message') - parser.add_argument('--autogenerate', action='store_true') - parser.set_defaults(func=command_object.revision) - - parser = subparsers.add_parser( - 'version', - help=("Print the current version information and exit.")) - parser.set_defaults(func=command_object.version) - - parser = subparsers.add_parser( - 'create_schema', - help=("Create the database schema.")) - parser.set_defaults(func=command_object.create_schema) - - parser = subparsers.add_parser('bootstrap') - parser.set_defaults(func=command_object.bootstrap_project) - - -def main(): - command_opt = cfg.SubCommandOpt('command', - title='Command', - help=('Available commands'), - handler=add_command_parsers) - - CONF.register_cli_opt(command_opt) - CONF(project='craton-api') - CONF.command.func() diff --git a/craton/cmd/worker.py b/craton/cmd/worker.py deleted file mode 100644 index fe169a5..0000000 --- a/craton/cmd/worker.py +++ /dev/null @@ -1,84 +0,0 @@ -import contextlib -import signal -import sys - -from oslo_config import cfg -from oslo_log import log as logging -from oslo_utils import uuidutils -from stevedore import driver -from taskflow import engines -from taskflow.persistence import models - -from craton.workflow import worker - -LOG = logging.getLogger(__name__) -CONF = cfg.CONF - - -# This needs to be a globally accessible (ie: top-level) function, so -# flow recovery can execute it to re-create the intended workflows. 
-def workflow_factory(name, *args, **kwargs): - mgr = driver.DriverManager( - namespace='craton.workflow', name=name, - invoke_on_load=True, invoke_args=args, invoke_kwds=kwargs) - return mgr.driver.workflow() - - -def main(): - logging.register_options(CONF) - CONF(sys.argv[1:], project='craton-worker', default_config_files=[]) - logging.setup(CONF, 'craton') - - persistence, board, conductor = worker.start(CONF) - - def stop(signum, _frame): - LOG.info('Caught signal %s, gracefully exiting', signum) - conductor.stop() - signal.signal(signal.SIGTERM, stop) - - # TODO(gus): eventually feeding in jobs will happen elsewhere and - # main() will end here. - # - # conductor.wait() - # sys.exit(0) - - def make_save_book(persistence, job_id, - flow_plugin, plugin_args=(), plugin_kwds={}): - flow_id = book_id = job_id # Do these need to be different? - book = models.LogBook(book_id) - detail = models.FlowDetail(flow_id, uuidutils.generate_uuid()) - book.add(detail) - - factory_args = [flow_plugin] + list(plugin_args) - factory_kwargs = plugin_kwds - engines.save_factory_details(detail, workflow_factory, - factory_args, factory_kwargs) - with contextlib.closing(persistence.get_connection()) as conn: - conn.save_logbook(book) - return book - - # Feed in example task - job_uuid = uuidutils.generate_uuid() - LOG.debug('Posting job %s', job_uuid) - details = { - 'store': { - 'foo': 'bar', - }, - } - - job = board.post( - job_uuid, - book=make_save_book( - persistence, job_uuid, - 'testflow', plugin_kwds=dict(task_delay=2)), - details=details) - - # Run forever. 
TODO(gus): This is what we want to do in production - # conductor.wait() - job.wait() - LOG.debug('Job finished: %s', job.state) - conductor.stop() - - -if __name__ == '__main__': - main() diff --git a/craton/db/__init__.py b/craton/db/__init__.py deleted file mode 100644 index 2d7e8b2..0000000 --- a/craton/db/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -""" -DB abstraction for Craton Inventory -""" - -from craton.db.api import * # noqa diff --git a/craton/db/api.py b/craton/db/api.py deleted file mode 100644 index 1fabb94..0000000 --- a/craton/db/api.py +++ /dev/null @@ -1,335 +0,0 @@ -"""Defines interface for DB access.""" - -from collections import namedtuple - -from oslo_config import cfg -from oslo_db import api as db_api - -db_opts = [ - cfg.StrOpt('db_backend', default='sqlalchemy', - help='The backend to use for DB.'), -] - -CONF = cfg.CONF -CONF.register_opts(db_opts) - -# entrypoint namespace for db backend -BACKEND_MAPPING = {'sqlalchemy': 'craton.db.sqlalchemy.api'} -IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=BACKEND_MAPPING, - lazy=True) - - -# Blame supports generic blame tracking for variables -# TODO(jimbaker) add additional governance support, such as -# versioning, user, notes - -Blame = namedtuple('Blame', ['source', 'variable']) - - -def devices_get_all(context, filters, pagination_params): - """Get all available devices.""" - return IMPL.devices_get_all(context, filters, pagination_params) - - -def get_user_info(context, user): - return IMPL.get_user_info(context, user) - - -def resource_get_by_id(context, resources, resource_id): - """Get resource for the unique resource id.""" - return IMPL.resource_get_by_id(context, resources, resource_id) - - -def variables_update_by_resource_id(context, resources, resource_id, data): - """Update/create existing resource's variables.""" - return IMPL.variables_update_by_resource_id( - context, - resources, - resource_id, - data, - ) - - -def variables_delete_by_resource_id(context, resources, 
resource_id, data): - """Delete the existing variables, if present, from resource's data.""" - return IMPL.variables_delete_by_resource_id( - context, - resources, - resource_id, - data, - ) - - -# Cells - -def cells_get_all(context, filters, pagination_params): - """Get all available cells.""" - return IMPL.cells_get_all(context, filters, pagination_params) - - -def cells_get_by_id(context, cell_id): - """Get cell detail for the unique cell id.""" - return IMPL.cells_get_by_id(context, cell_id) - - -def cells_create(context, values): - """Create a new cell.""" - return IMPL.cells_create(context, values) - - -def cells_update(context, cell_id, values): - """Update an existing cell.""" - return IMPL.cells_update(context, cell_id, values) - - -def cells_delete(context, cell_id): - """Delete an existing cell.""" - return IMPL.cells_delete(context, cell_id) - -# Regions - - -def regions_get_all(context, filters, pagination_params): - """Get all available regions.""" - return IMPL.regions_get_all(context, filters, pagination_params) - - -def regions_get_by_name(context, name): - """Get cell detail for the region with given name.""" - return IMPL.regions_get_by_name(context, name) - - -def regions_get_by_id(context, region_id): - """Get cell detail for the region with given id.""" - return IMPL.regions_get_by_id(context, region_id) - - -def regions_create(context, values): - """Create a new region.""" - return IMPL.regions_create(context, values) - - -def regions_update(context, region_id, values): - """Update an existing region.""" - return IMPL.regions_update(context, region_id, values) - - -def regions_delete(context, region_id): - """Delete an existing region.""" - return IMPL.regions_delete(context, region_id) - -# Clouds - - -def clouds_get_all(context, filters, pagination_params): - """Get all available clouds.""" - return IMPL.clouds_get_all(context, filters, pagination_params) - - -def clouds_get_by_name(context, name): - """Get clouds with given name.""" - 
return IMPL.clouds_get_by_name(context, name) - - -def clouds_get_by_id(context, cloud_id): - """Get cloud detail for the cloud with given id.""" - return IMPL.clouds_get_by_id(context, cloud_id) - - -def clouds_create(context, values): - """Create a new cloud.""" - return IMPL.clouds_create(context, values) - - -def clouds_update(context, cloud_id, values): - """Update an existing cloud.""" - return IMPL.clouds_update(context, cloud_id, values) - - -def clouds_delete(context, cloud_id): - """Delete an existing cloud.""" - return IMPL.clouds_delete(context, cloud_id) - -# Hosts - - -def hosts_get_all(context, filters, pagination_params): - """Get all hosts.""" - return IMPL.hosts_get_all(context, filters, pagination_params) - - -def hosts_get_by_id(context, host_id): - """Get details for the host with given id.""" - return IMPL.hosts_get_by_id(context, host_id) - - -def hosts_create(context, values): - """Create a new host.""" - return IMPL.hosts_create(context, values) - - -def hosts_update(context, host_id, values): - """Update an existing host.""" - return IMPL.hosts_update(context, host_id, values) - - -def hosts_delete(context, host_id): - """Delete an existing host.""" - return IMPL.hosts_delete(context, host_id) - - -def hosts_labels_delete(context, host_id, labels): - """Delete existing device label(s).""" - return IMPL.hosts_labels_delete(context, host_id, labels) - - -def hosts_labels_update(context, host_id, labels): - """Update existing device label entirely.""" - return IMPL.hosts_labels_update(context, host_id, labels) - - -# Projects - -def projects_get_all(context, filters, pagination_params): - """Get all the projects.""" - return IMPL.projects_get_all(context, filters, pagination_params) - - -def projects_get_by_name(context, project_name, filters, pagination_params): - """Get all projects that match the given name.""" - return IMPL.projects_get_by_name(context, project_name, filters, - pagination_params) - - -def projects_get_by_id(context, 
project_id): - """Get project by its id.""" - return IMPL.projects_get_by_id(context, project_id) - - -def projects_create(context, values): - """Create a new project with given values.""" - return IMPL.projects_create(context, values) - - -def projects_delete(context, project_id): - """Delete an existing project given by its id.""" - return IMPL.projects_delete(context, project_id) - - -# Users - -def users_get_all(context, filters, pagination_params): - """Get all the users.""" - return IMPL.users_get_all(context, filters, pagination_params) - - -def users_get_by_name(context, user_name, filters, pagination_params): - """Get all users that match the given username.""" - return IMPL.users_get_by_name(context, user_name, filters, - pagination_params) - - -def users_get_by_id(context, user_id): - """Get user by its id.""" - return IMPL.users_get_by_id(context, user_id) - - -def users_create(context, values): - """Create a new user with given values.""" - return IMPL.users_create(context, values) - - -def users_delete(context, user_id): - """Delete an existing user given by its id.""" - return IMPL.users_delete(context, user_id) - - -# Networks - -def networks_get_all(context, filters, pagination_params): - """Get all networks for the given region.""" - return IMPL.networks_get_all(context, filters, pagination_params) - - -def networks_get_by_id(context, network_id): - """Get a given network by its id.""" - return IMPL.networks_get_by_id(context, network_id) - - -def networks_create(context, values): - """Create a new network.""" - return IMPL.networks_create(context, values) - - -def networks_update(context, network_id, values): - """Update an existing network.""" - return IMPL.networks_update(context, network_id, values) - - -def networks_delete(context, network_id): - """Delete existing network.""" - return IMPL.networks_delete(context, network_id) - - -def network_devices_get_all(context, filters, pagination_params): - """Get all network devices.""" - return 
IMPL.network_devices_get_all(context, filters, pagination_params) - - -def network_devices_get_by_id(context, network_device_id): - """Get a given network device by its id.""" - return IMPL.network_devices_get_by_id(context, network_device_id) - - -def network_devices_create(context, values): - """Create a new network device.""" - return IMPL.network_devices_create(context, values) - - -def network_devices_update(context, network_device_id, values): - """Update an existing network device""" - return IMPL.network_devices_update(context, network_device_id, values) - - -def network_devices_delete(context, network_device_id): - """Delete existing network device.""" - return IMPL.network_devices_delete(context, network_device_id) - - -def network_devices_labels_delete(context, network_device_id, labels): - """Delete network device labels.""" - return IMPL.network_devices_labels_delete(context, network_device_id, - labels) - - -def network_devices_labels_update(context, network_device_id, labels): - """Update network device labels.""" - return IMPL.network_devices_labels_update(context, network_device_id, - labels) - - -def network_interfaces_get_all(context, filters, pagination_params): - """Get all network interfaces.""" - return IMPL.network_interfaces_get_all( - context, filters, pagination_params, - ) - - -def network_interfaces_get_by_id(context, interface_id): - """Get a given network interface by its id.""" - return IMPL.network_interfaces_get_by_id(context, interface_id) - - -def network_interfaces_create(context, values): - """Create a new network interface.""" - return IMPL.network_interfaces_create(context, values) - - -def network_interfaces_update(context, interface_id, values): - """Update an existing network interface.""" - return IMPL.network_interfaces_update(context, interface_id, values) - - -def network_interfaces_delete(context, interface_id): - """Delete existing network interface.""" - return IMPL.network_interfaces_delete(context, interface_id) 
diff --git a/craton/db/sqlalchemy/__init__.py b/craton/db/sqlalchemy/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/craton/db/sqlalchemy/alembic.ini b/craton/db/sqlalchemy/alembic.ini deleted file mode 100644 index 4d0b584..0000000 --- a/craton/db/sqlalchemy/alembic.ini +++ /dev/null @@ -1,68 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = %(here)s/alembic - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# max length of characters to apply to the -# "slug" field -#truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; this defaults -# to alembic/versions. When using multiple version -# directories, initial revisions must be specified with --version-path -# version_locations = %(here)s/bar %(here)s/bat alembic/versions - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -#sqlalchemy.url = driver://user:pass@localhost/dbname - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/craton/db/sqlalchemy/alembic/README b/craton/db/sqlalchemy/alembic/README deleted file mode 100644 index 
50c73e4..0000000 --- a/craton/db/sqlalchemy/alembic/README +++ /dev/null @@ -1,11 +0,0 @@ -Please see https://alembic.readthedocs.org/en/latest/index.html for general documentation - -To create alembic migrations use: -$ craton-dbsync --config-file=craton.conf revision --message "revision description" --autogenerate - -Stamp db with most recent migration version, without actually running migrations -$ craton-dbsync --config-file=craton.conf stamp head - -Upgrade can be performed by: -$ craton-dbsync --config-file=craton.conf upgrade -$ craton-dbsync --config-file=craton.conf upgrade head diff --git a/craton/db/sqlalchemy/alembic/env.py b/craton/db/sqlalchemy/alembic/env.py deleted file mode 100644 index 092e68b..0000000 --- a/craton/db/sqlalchemy/alembic/env.py +++ /dev/null @@ -1,66 +0,0 @@ -from __future__ import with_statement -from alembic import context -from logging.config import fileConfig - -from craton.db.sqlalchemy import api as sa_api -from craton.db.sqlalchemy import models as db_models - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = db_models.Base.metadata - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. 
- - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, target_metadata=target_metadata, literal_binds=True) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - engine = sa_api.get_engine() - with engine.connect() as connection: - context.configure( - connection=connection, - target_metadata=target_metadata - ) - - with context.begin_transaction(): - context.run_migrations() - - -run_migrations_online() diff --git a/craton/db/sqlalchemy/alembic/script.py.mako b/craton/db/sqlalchemy/alembic/script.py.mako deleted file mode 100644 index 43c0940..0000000 --- a/craton/db/sqlalchemy/alembic/script.py.mako +++ /dev/null @@ -1,24 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/craton/db/sqlalchemy/alembic/versions/ffdc1a500db1_craton_inventory_init.py b/craton/db/sqlalchemy/alembic/versions/ffdc1a500db1_craton_inventory_init.py deleted file mode 100644 index c0dc28c..0000000 --- a/craton/db/sqlalchemy/alembic/versions/ffdc1a500db1_craton_inventory_init.py +++ /dev/null @@ -1,407 +0,0 @@ -"""craton_inventory_init - -Revision ID: ffdc1a500db1 -Revises: -Create Date: 2016-06-03 09:52:55.302936 - -""" - -# revision identifiers, used by Alembic. 
-revision = 'ffdc1a500db1' -down_revision = None -branch_labels = None -depends_on = None - -from alembic import op -import sqlalchemy as sa -import sqlalchemy_utils - - -def upgrade(): - op.create_table( - 'variable_association', - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('updated_at', sa.DateTime, nullable=True), - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('discriminator', sa.String(length=50), nullable=False), - ) - - op.create_table( - 'variables', - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('updated_at', sa.DateTime, nullable=True), - sa.Column( - 'association_id', sa.Integer, - sa.ForeignKey( - 'variable_association.id', - name='fk_variables__variables_association', - ondelete='cascade'), - primary_key=True), - sa.Column('key_', sa.String(length=255), primary_key=True), - sa.Column('value_', sa.JSON, nullable=True), - ) - - op.create_table( - 'projects', - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('updated_at', sa.DateTime, nullable=True), - sa.Column( - 'id', sqlalchemy_utils.types.UUIDType(binary=False), - primary_key=True), - sa.Column( - 'variable_association_id', sa.Integer, - sa.ForeignKey( - 'variable_association.id', - name='fk_projects__variable_association'), - nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), - ) - - op.create_table( - 'users', - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('updated_at', sa.DateTime, nullable=True), - sa.Column('id', sa.Integer, primary_key=True), - sa.Column( - 'project_id', sqlalchemy_utils.types.UUIDType(binary=False), - sa.ForeignKey( - 'projects.id', name='fk_users__projects', ondelete='cascade'), - nullable=False), - sa.Column( - 'variable_association_id', sa.Integer, - sa.ForeignKey( - 'variable_association.id', - name='fk_users__variable_association'), - nullable=False), - sa.Column('username', sa.String(length=255), nullable=True), - sa.Column('api_key', 
sa.String(length=36), nullable=True), - sa.Column('is_root', sa.Boolean, nullable=True), - sa.Column('is_admin', sa.Boolean, nullable=True), - sa.UniqueConstraint( - 'username', 'project_id', - name='uq_users_username_project_id'), - ) - op.create_index( - op.f('ix_users_project_id'), 'users', ['project_id'], unique=False) - - op.create_table( - 'clouds', - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('updated_at', sa.DateTime, nullable=True), - sa.Column('id', sa.Integer, primary_key=True), - sa.Column( - 'project_id', sqlalchemy_utils.types.UUIDType(binary=False), - sa.ForeignKey( - 'projects.id', name='fk_clouds__projects', ondelete='cascade'), - nullable=False), - sa.Column( - 'variable_association_id', sa.Integer, - sa.ForeignKey( - 'variable_association.id', - name='fk_clouds__variable_association'), - nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), - sa.Column('note', sa.Text, nullable=True), - sa.UniqueConstraint( - 'project_id', 'name', - name='uq_clouds__project_id__name'), - ) - op.create_index( - op.f('ix_clouds_project_id'), 'clouds', ['project_id'], unique=False) - - op.create_table( - 'regions', - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('updated_at', sa.DateTime, nullable=True), - sa.Column('id', sa.Integer, primary_key=True), - sa.Column( - 'project_id', sqlalchemy_utils.types.UUIDType(binary=False), - sa.ForeignKey( - 'projects.id', - name='fk_projects__regions', ondelete='cascade')), - sa.Column( - 'cloud_id', sa.Integer, - sa.ForeignKey( - 'clouds.id', name='fk_regions__clouds', ondelete='cascade'), - nullable=False), - sa.Column( - 'variable_association_id', sa.Integer, - sa.ForeignKey( - 'variable_association.id', - name='fk_regions__variable_association'), - nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), - sa.Column('note', sa.Text, nullable=True), - sa.UniqueConstraint( - 'cloud_id', 'name', name='uq_regions__cloud_id__name'), - ) - - 
op.create_index( - op.f('ix_regions_project_id'), 'regions', ['project_id'], unique=False) - op.create_index( - op.f('ix_regions_cloud_id'), 'regions', ['cloud_id'], unique=False) - - op.create_table( - 'cells', - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('updated_at', sa.DateTime, nullable=True), - sa.Column('id', sa.Integer, primary_key=True), - sa.Column( - 'project_id', sqlalchemy_utils.types.UUIDType(binary=False), - sa.ForeignKey( - 'projects.id', name='fk_cells__projects', ondelete='cascade'), - nullable=False), - sa.Column( - 'cloud_id', sa.Integer, - sa.ForeignKey( - 'clouds.id', name='fk_cells__clouds', ondelete='cascade'), - nullable=False), - sa.Column( - 'region_id', sa.Integer, - sa.ForeignKey( - 'regions.id', name='fk_cells__regions', ondelete='cascade'), - nullable=False), - sa.Column( - 'variable_association_id', sa.Integer, - sa.ForeignKey( - 'variable_association.id', - name='fk_cells__variable_association'), - nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), - sa.Column('note', sa.Text, nullable=True), - sa.UniqueConstraint( - 'region_id', 'name', name='uq_cells__region_id__name'), - ) - op.create_index( - op.f('ix_cells_project_id'), 'cells', ['project_id'], unique=False) - op.create_index( - op.f('ix_cells_cloud_id'), 'cells', ['cloud_id'], unique=False) - op.create_index( - op.f('ix_cells_region_id'), 'cells', ['region_id'], unique=False) - - op.create_table( - 'networks', - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('updated_at', sa.DateTime, nullable=True), - sa.Column('id', sa.Integer, primary_key=True), - sa.Column( - 'project_id', sqlalchemy_utils.types.UUIDType(binary=False), - sa.ForeignKey( - 'projects.id', - name='fk_networks__projects', ondelete='cascade'), - nullable=False), - sa.Column( - 'cloud_id', sa.Integer, - sa.ForeignKey( - 'clouds.id', name='fk_networks__clouds', ondelete='cascade'), - nullable=False), - sa.Column( - 'region_id', sa.Integer, - 
sa.ForeignKey( - 'regions.id', name='fk_networks__regions', ondelete='cascade'), - nullable=False), - sa.Column( - 'cell_id', sa.Integer, - sa.ForeignKey( - 'cells.id', name='fk_networks__cells', ondelete='cascade'), - nullable=True), - sa.Column( - 'variable_association_id', sa.Integer, - sa.ForeignKey( - 'variable_association.id', - name='fk_networks__variable_association'), - nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), - sa.Column('cidr', sa.String(length=255), nullable=True), - sa.Column('gateway', sa.String(length=255), nullable=True), - sa.Column('netmask', sa.String(length=255), nullable=True), - sa.Column('ip_block_type', sa.String(length=255), nullable=True), - sa.Column('nss', sa.String(length=255), nullable=True), - sa.UniqueConstraint( - 'name', 'project_id', 'region_id', - name='uq_networks__name__project_id__region_id'), - ) - op.create_index( - op.f('ix_networks_cell_id'), 'networks', ['cell_id'], unique=False) - op.create_index( - op.f('ix_networks_project_id'), 'networks', ['project_id'], - unique=False) - op.create_index( - op.f('ix_networks_region_id'), 'networks', ['region_id'], - unique=False) - - op.create_table( - 'devices', - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('updated_at', sa.DateTime, nullable=True), - sa.Column('id', sa.Integer, primary_key=True), - sa.Column( - 'project_id', sqlalchemy_utils.types.UUIDType(binary=False), - sa.ForeignKey( - 'projects.id', - name='fk_devices__projects', ondelete='cascade'), - nullable=False), - sa.Column( - 'cloud_id', sa.Integer, - sa.ForeignKey( - 'clouds.id', name='fk_devices__clouds', ondelete='cascade'), - nullable=False), - sa.Column( - 'region_id', sa.Integer, - sa.ForeignKey( - 'regions.id', name='fk_devices__regions', ondelete='cascade'), - nullable=False), - sa.Column( - 'cell_id', sa.Integer, - sa.ForeignKey( - 'cells.id', name='fk_devices__cells', ondelete='cascade'), - nullable=True), - sa.Column( - 'parent_id', sa.Integer, - 
sa.ForeignKey('devices.id', name='fk_devices__devices'), - nullable=True), - sa.Column( - 'variable_association_id', sa.Integer, - sa.ForeignKey( - 'variable_association.id', - name='fk_devices__variable_association'), - nullable=False), - sa.Column('type', sa.String(length=50), nullable=False), - sa.Column('device_type', sa.String(length=255), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column( - 'ip_address', - sqlalchemy_utils.types.IPAddressType(length=64), - nullable=False), - sa.Column('active', sa.Boolean(), nullable=True), - sa.Column('note', sa.Text(), nullable=True), - sa.UniqueConstraint('region_id', 'name', - name='uq_device0regionid0name'), - ) - op.create_index( - op.f('ix_devices_cell_id'), 'devices', ['cell_id'], unique=False) - op.create_index( - op.f('ix_devices_project_id'), 'devices', ['project_id'], unique=False) - op.create_index( - op.f('ix_devices_region_id'), 'devices', ['region_id'], unique=False) - op.create_index( - op.f('ix_devices_cloud_id'), 'devices', ['cloud_id'], unique=False) - - op.create_table( - 'hosts', - sa.Column( - 'id', sa.Integer, - sa.ForeignKey( - 'devices.id', name='fk_hosts__devices', ondelete='cascade'), - primary_key=True) - ) - - op.create_table( - 'network_devices', - sa.Column( - 'id', sa.Integer, - sa.ForeignKey( - 'devices.id', - name='fk_network_devices__devices', ondelete='cascade'), - primary_key=True), - sa.Column('model_name', sa.String(length=255), nullable=True), - sa.Column('os_version', sa.String(length=255), nullable=True), - sa.Column('vlans', sa.JSON, - nullable=True) - ) - - op.create_table( - 'labels', - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('updated_at', sa.DateTime, nullable=True), - sa.Column( - 'device_id', sa.Integer, - sa.ForeignKey( - 'devices.id', - name='fk_labels__devices', ondelete='cascade'), - primary_key=True), - sa.Column('label', sa.String(length=255), primary_key=True), - ) - op.create_index( - 
op.f('ix_devices_labels'), 'labels', ['label', 'device_id']) - - op.create_table( - 'network_interfaces', - sa.Column('created_at', sa.DateTime, nullable=False), - sa.Column('updated_at', sa.DateTime, nullable=True), - sa.Column('id', sa.Integer, primary_key=True), - sa.Column( - 'project_id', sqlalchemy_utils.types.UUIDType(binary=False), - sa.ForeignKey( - 'projects.id', - name='fk_network_interfaces__projects', ondelete='cascade'), - nullable=False), - sa.Column( - 'device_id', sa.Integer, - sa.ForeignKey( - 'devices.id', - name='fk_network_interfaces__devices', ondelete='cascade'), - nullable=False), - sa.Column( - 'network_id', sa.Integer, - sa.ForeignKey( - 'networks.id', - name='fk_network_interfaces__networks'), - nullable=True), - sa.Column( - 'variable_association_id', sa.Integer, - sa.ForeignKey( - 'variable_association.id', - name='fk_network_interfaces__variable_association'), - nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), - sa.Column('interface_type', sa.String(length=255), nullable=True), - sa.Column('vlan_id', sa.Integer, nullable=True), - sa.Column('port', sa.Integer, nullable=True), - sa.Column('vlan', sa.String(length=255), nullable=True), - sa.Column('duplex', sa.String(length=255), nullable=True), - sa.Column('speed', sa.String(length=255), nullable=True), - sa.Column('link', sa.String(length=255), nullable=True), - sa.Column('cdp', sa.String(length=255), nullable=True), - sa.Column('security', sa.String(length=255), nullable=True), - sa.Column( - 'ip_address', - sqlalchemy_utils.types.IPAddressType(length=64), - nullable=False), - sa.UniqueConstraint( - 'device_id', 'name', - name='uq_network_interfaces__device_id__name'), - ) - - -def downgrade(): - op.drop_table('network_interfaces') - op.drop_index(op.f('ix_devices_labels'), table_name='labels') - op.drop_table('labels') - op.drop_table('network_devices') - op.drop_table('hosts') - op.drop_index(op.f('ix_networks_region_id'), table_name='networks') - 
op.drop_index(op.f('ix_networks_cloud_id'), table_name='networks') - op.drop_index(op.f('ix_networks_project_id'), table_name='networks') - op.drop_index(op.f('ix_networks_cell_id'), table_name='networks') - op.drop_table('networks') - op.drop_index(op.f('ix_devices_region_id'), table_name='devices') - op.drop_index(op.f('ix_devices_cloud_id'), table_name='devices') - op.drop_index(op.f('ix_devices_project_id'), table_name='devices') - op.drop_index(op.f('ix_devices_cell_id'), table_name='devices') - op.drop_table('devices') - op.drop_index(op.f('ix_cells_region_id'), table_name='cells') - op.drop_index(op.f('ix_cells_cloud_id'), table_name='cells') - op.drop_index(op.f('ix_cells_project_id'), table_name='cells') - op.drop_table('cells') - op.drop_index(op.f('ix_users_project_id'), table_name='users') - op.drop_index(op.f('ix_regions_project_id'), table_name='regions') - op.drop_index(op.f('ix_regions_cloud_id'), table_name='regions') - op.drop_table('regions') - op.drop_index(op.f('ix_clouds_project_id'), table_name='clouds') - op.drop_table('clouds') - op.drop_table('users') - op.drop_table('projects') - op.drop_index(op.f('ix_variable_keys'), table_name='variables') - op.drop_table('variables') - op.drop_table('variable_association') diff --git a/craton/db/sqlalchemy/api.py b/craton/db/sqlalchemy/api.py deleted file mode 100644 index 7f3c043..0000000 --- a/craton/db/sqlalchemy/api.py +++ /dev/null @@ -1,1338 +0,0 @@ -"""SQLAlchemy backend implementation.""" - -import enum -import functools -from operator import attrgetter -import sys -import uuid - -import jsonpath_rw as jspath - -from oslo_config import cfg -from oslo_db import exception as db_exc -from oslo_db import options as db_options -from oslo_db.sqlalchemy import session -from oslo_db.sqlalchemy import utils as db_utils -from oslo_log import log - -from sqlalchemy import and_, sql -from sqlalchemy import func as sa_func -import sqlalchemy.orm.exc as sa_exc -from sqlalchemy.orm import with_polymorphic - 
-from craton import exceptions -from craton.db.sqlalchemy import models - - -CONF = cfg.CONF - -LOG = log.getLogger(__name__) - - -_FACADE = None - -_DEFAULT_SQL_CONNECTION = 'sqlite://' -db_options.set_defaults(cfg.CONF, - connection=_DEFAULT_SQL_CONNECTION) - -MYSQL_INVALID_JSONPATH_EXPRESSION = 3143 -MYSQL_INVALID_JSON_TEXT = 3141 - -JSON_EXCEPTIONS = { - MYSQL_INVALID_JSONPATH_EXPRESSION: exceptions.InvalidJSONPath, - MYSQL_INVALID_JSON_TEXT: exceptions.InvalidJSONValue, -} - - -def _create_facade_lazily(): - global _FACADE - if _FACADE is None: - _FACADE = session.EngineFacade.from_config(cfg.CONF) - return _FACADE - - -def get_engine(): - facade = _create_facade_lazily() - return facade.get_engine() - - -def get_session(**kwargs): - facade = _create_facade_lazily() - return facade.get_session(**kwargs) - - -def get_backend(): - """The backend is this module itself.""" - return sys.modules[__name__] - - -def is_admin_context(context): - """Check if this request had admin project context.""" - if (context.is_admin and context.is_admin_project): - return True - return False - - -def is_project_admin_context(context): - """Check if this request has admin context with in the project.""" - if context.is_admin: - return True - return False - - -def require_admin_context(f): - """Decorator that ensures admin request context.""" - def wrapper(*args, **kwargs): - if not is_admin_context(args[0]): - raise exceptions.AdminRequired() - return f(*args, **kwargs) - return wrapper - - -def require_project_admin_context(f): - """Decorator that ensures admin or project_admin request context.""" - def wrapper(*args, **kwargs): - context = args[0] - if is_project_admin_context(context): - return f(*args, **kwargs) - elif is_project_admin_context(args[0]): - return f(*args, **kwargs) - else: - raise exceptions.AdminRequired() - return wrapper - - -class CRUD(enum.Enum): - CREATE = 'create' - READ = 'read' - UPDATE = 'update' - DELETE = 'delete' - - -def permissions_for(action): - 
# NOTE(thomasem): Temporary shim applying existing project admin / root - # project checks to generic resource methods, since we don't have RBAC - # fully baked yet. This needs to be removed once we have solved this via - # craton-rbac-support blueprint. This affords us being able to use - # generic resource methods support for resources like Projects, where one - # must be Project admin to interact. - def get_permissions_for(fn): - @functools.wraps(fn) - def wrapper(*args, **kwargs): - # NOTE(thomasem): Standard order of arguments for generic resources - # methods is (context, resources, ...), so args[0] is always - # context and args[1] is always resources. - context = args[0] - resources = args[1] - permissions = { - CRUD.CREATE: { - 'projects': is_project_admin_context(context), - }, - CRUD.READ: { - 'projects': is_project_admin_context(context), - }, - CRUD.UPDATE: { - 'projects': is_project_admin_context(context), - }, - CRUD.DELETE: { - 'projects': is_project_admin_context(context), - } - } - # NOTE(thomasem): Default to True unless otherwise specified in - # permissions dict. - if permissions[action].get(resources, True): - return fn(*args, **kwargs) - else: - raise exceptions.AdminRequired() - return wrapper - return get_permissions_for - - -def model_query(context, model, *args, **kwargs): - """Query helper that accounts for context's `read_deleted` field. - :param context: context to query under - :param model: model to query. Must be a subclass of ModelBase. - :param session: if present, the session to use - :param project_only: if present and context is user-type, then restrict - query to match the context's project_id. 
- """ - session = kwargs.get('session') or get_session() - project_only = kwargs.get('project_only') - kwargs = dict() - - if project_only and not context.is_admin: - kwargs['project_id'] = context.tenant - - return db_utils.model_query( - model=model, session=session, args=args, **kwargs) - - -def _find_first_key_fragment(root): - """Finds element where first key Field exists.""" - desired = jspath.Fields - if isinstance(root, desired): - return root - while not isinstance(root.left, desired): - root = root.left - return root - - -def _split_key_path_prefix(root): - """Extract first key and initial path from parsed JSON Path. - - This is necessitated by us not actually including the top-most Key - component in the JSON column (Variables.value) in the database, so we only - need a parser to extract the first Key from the expression. The rest can be - handled in the database. - - This essentially takes a parsed JSON Path, finds the first field and - optional Slice or Index. It then uses the value of the first field's value - (fields[0]) as the key and, if a Slice of Index exist on the right - side of the expression, it builds an Array specifier as the initial part of - the path and then returns them. - """ - # NOTE(thomasem): Because of how keys work and our data model, the first - # element should not be anything other than a Fields or a Child fragment. - if not isinstance(root, (jspath.Fields, jspath.Child)): - raise exceptions.InvalidJSONPath() - - path = '' - fragment = _find_first_key_fragment(root) - right = getattr(fragment, 'right', None) - left = getattr(fragment, 'left', fragment) - - if isinstance(right, jspath.Slice) and any([right.start, right.end]): - # NOTE(thomasem): MySQL 5.7 does not support arbitrary slices, only - # '[*]'. 
- raise exceptions.InvalidJSONPath() - elif isinstance(right, jspath.Slice): - path = '[*]' - elif isinstance(right, jspath.Index): - path = '[{}]'.format(right.index) - - key = left.fields[0] - return key, path - - -def _parse_path_expr(path_expression): - """Split into the first key and the path used for querying MySQL.""" - try: - parsed = jspath.parse(path_expression) - except Exception as e: - # NOTE(thomasem): Unfortunately, this library raises an Exception - # instead of something more specific. - raise exceptions.InvalidJSONPath() from e - - # NOTE(thomasem): Get the first key from the parsed JSON Path expression - # and initial path for the Variables.value JSON column, if there is any. - # There would only be an initial path if there's an Array specifier - # immediately adjacent to the first key, for example: 'foo[5]' or 'foo[*]'. - key, path = _split_key_path_prefix(parsed) - - # NOTE(thomasem): Remove the key we found from the original expression - # since that's not included in the Variables.value JSON column. - key_removed = path_expression[len(key):] - - # NOTE(thomasem): Because the first key could have been wrapped in quotes - # to denote it as a JSON string for handling special characters in the - # key, we will want to find the first delimiter ('.') if one exists to - # know where to slice off the remainder of the path and combine prefix - # and suffix. - path_suffix_start = key_removed.find('.') - if path_suffix_start > -1: - path = '{}{}'.format(path, key_removed[path_suffix_start:]) - return key, '${}'.format(path) - - -def _json_path_clause(kv_pair): - path_expr, value = kv_pair - key, path = _parse_path_expr(path_expr) - - json_match = sa_func.json_contains( - sa_func.json_extract(models.Variable.value, path), value) - - # NOTE(thomasem): Order is important here. MySQL will short-circuit and - # not properly validate the JSON Path expression when the key doesn't exist - # if the key match is first int he and_(...) expression. 
So, let's put - # the json_match first. - return and_(json_match, models.Variable.key == key) - - -def _get_variables(query): - try: - return set(query) - except db_exc.DBError as e: - orig = e.inner_exception.orig - code = orig.args[0] - if orig and code in JSON_EXCEPTIONS: - raise JSON_EXCEPTIONS[code]() from e - raise - - -def _matching_resources(query, resource_cls, get_descendants, kv): - - # NOTE(jimbaker) Build a query that determine all resources that - # match this key/value, taking in account variable - # resolution. Note that this value is generalized to be a JSON - # path; and the resolution is inverted by finding any possible - # descendants. - - kv_pairs = list(kv.items()) - matches = {} - for kv_pair in kv_pairs: - match = matches[kv_pair] = set() - kv_clause = _json_path_clause(kv_pair) - matching_variables = _get_variables( - query.session.query(models.Variable).filter(kv_clause)) - - for variable in matching_variables: - if isinstance(variable.parent, resource_cls): - match.add(variable.parent) - - # NOTE(jimbaker) now check for descendent overrides; in - # particular, it's possible that a chain of overrides - # could occur such that the original top value was - # restored. - # - # Because all of the matching variables were returned by - # the query; and SQLAlchemy guarantees that within the - # scope of a transaction ("unit of work") that a given - # object will have the same identity, we can check with - # respect to matching_variables. Do note that arbitrary - # additional object graph queries may be done to check the - # resolution ordering. - - for descendant in get_descendants(variable.parent): - for level in descendant.resolution_order: - desc_variable = level._variables.get(variable.key) - if desc_variable is not None: - if desc_variable in matching_variables: - match.add(descendant) - break - - # NOTE(jimbaker) For now, we simply match for the conjunction - # ("and") of all the supplied kv pairs we are matching - # against. 
Generalize in the future as desired with other boolean - # logic. - _, first_match = matches.popitem() - if matches: - resources = first_match.intersection(*matches.values()) - else: - resources = first_match - return resources - - -def _get_devices(parent): - if isinstance(parent, models.Device): - return parent.descendants - else: - return parent.devices - - -_resource_mapping = { - models.Project: ([], None), - models.Cloud: ([models.Project], attrgetter('clouds')), - models.Region: ([models.Project, models.Cloud], attrgetter('regions')), - models.Cell: ( - [models.Project, models.Cloud, models.Region], - attrgetter('cells')), - models.Network: ( - [models.Project, models.Cloud, models.Region, models.Cell], - attrgetter('networks')), - models.Device: ( - [models.Project, models.Cloud, models.Region, models.Cell, - models.Device], - _get_devices), -} - - -def matching_resources(query, resource_cls, kv, resolved): - def get_desc(parent): - parent_classes, getter = _resource_mapping[resource_cls] - # NOTE(thomasem): If we're not resolving, there are no descendants - # to process, so return an empty list. - if resolved and any(isinstance(parent, cls) for cls in parent_classes): - return getter(parent) - else: - return [] - - return _matching_resources(query, resource_cls, get_desc, kv) - - -def _add_var_filters_to_query(query, model, var_filters, resolved=True): - # vars filters are of form ?vars=a:b[,c:d,...] 
- the filters in - # this case are intersecting ("and" queries) - kv = dict(pairing.split(':', 1) for pairing in var_filters) - resource_ids = set( - resource.id - for resource in matching_resources(query, model, kv, resolved) - ) - if not resource_ids: - # short circuit; this also avoids SQLAlchemy reporting that it is - # working with an empty "in" clause - return query.filter(sql.false()) - return query.filter(model.id.in_(resource_ids)) - - -def add_var_filters_to_query(query, model, filters): - var_filters = filters['vars'].split(',') - resolved = bool(filters.get('resolved-values')) - return _add_var_filters_to_query(query, model, var_filters, - resolved=resolved) - - -def get_user_info(context, username): - """Get user info.""" - query = model_query(context, models.User, project_only=True) - query = query.filter_by(username=username) - try: - return query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - except Exception as err: - raise exceptions.UnknownException(message=err) - - -def _get_resource_model(resource): - resource_models = { - "cells": models.Cell, - "devices": with_polymorphic(models.Device, "*"), - "hosts": with_polymorphic(models.Device, models.Host), - "network-devices": with_polymorphic( - models.Device, models.NetworkDevice - ), - "networks": models.Network, - "regions": models.Region, - "clouds": models.Cloud, - "projects": models.Project, - } - return resource_models[resource] - - -@permissions_for(CRUD.READ) -def resource_get_by_id( - context, resources, resource_id, session=None, for_update=False - ): - """Get resource for the unique resource id.""" - model = _get_resource_model(resources) - - query = model_query(context, model, project_only=True, session=session) - - if resources in ("hosts", "network-devices"): - query = query.filter_by(type=resources.replace("-", "_")) - - query = query.filter_by(id=resource_id) - - if for_update: - query = query.with_for_update() - - try: - resource = query.one() - except 
sa_exc.NoResultFound: - raise exceptions.NotFound() - else: - return resource - - -@permissions_for(CRUD.UPDATE) -def variables_update_by_resource_id(context, resources, resource_id, data): - """Update/create existing resource's variables.""" - session = get_session() - with session.begin(): - resource = resource_get_by_id( - context, resources, resource_id, session, for_update=True - ) - - resource.variables.update(data) - return resource - - -@permissions_for(CRUD.DELETE) -def variables_delete_by_resource_id(context, resources, resource_id, data): - """Delete the existing variables, if present, from resource's data.""" - session = get_session() - with session.begin(): - resource = resource_get_by_id( - context, resources, resource_id, session, for_update=True - ) - - for key in data: - try: - del resource.variables[key] - except KeyError: - pass - return resource - - -def devices_get_all(context, filters, pagination_params): - """Get all available devices.""" - session = get_session() - devices = with_polymorphic(models.Device, "*") - query = model_query(context, devices, project_only=True, session=session) - - if "parent_id" in filters and filters.get("descendants"): - parent = query.filter_by(id=filters["parent_id"]).one() - query = query.filter( - models.Device.id.in_(device.id for device in parent.descendants) - ) - elif "parent_id" in filters: - query = query.filter_by(parent_id=filters["parent_id"]) - - if "region_id" in filters: - query = query.filter_by(region_id=filters["region_id"]) - if "cloud_id" in filters: - query = query.filter_by(cloud_id=filters["cloud_id"]) - if "cell_id" in filters: - query = query.filter_by(cell_id=filters["cell_id"]) - if "active" in filters: - query = query.filter_by(active=filters["active"]) - - return _paginate(context, query, models.Device, session, filters, - pagination_params) - - -def _device_labels_update(context, device_type, device_id, labels): - """Update labels for the given device. 
Add the label if it is not present - in host labels list, otherwise do nothing.""" - session = get_session() - with session.begin(): - devices = with_polymorphic(models.Device, '*') - query = model_query(context, devices, session=session, - project_only=True) - query = query.filter_by(type=device_type) - query = query.filter_by(id=device_id) - try: - device = query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - - device.labels.update(labels["labels"]) - device.save(session) - return device - - -def _device_labels_delete(context, device_type, device_id, labels): - """Delete labels from the device labels list if it matches - the given label in the query, otherwise do nothing.""" - session = get_session() - with session.begin(): - devices = with_polymorphic(models.Device, '*') - query = model_query(context, devices, session=session, - project_only=True) - query = query.filter_by(type=device_type) - query = query.filter_by(id=device_id) - try: - device = query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - - for label in labels["labels"]: - device.labels.discard(label) - device.save(session) - return device - - -def cells_get_all(context, filters, pagination_params): - """Get all cells.""" - session = get_session() - query = model_query(context, models.Cell, project_only=True, - session=session) - - if "id" in filters: - query = query.filter_by(id=filters["id"]) - if "region_id" in filters: - query = query.filter_by(region_id=filters["region_id"]) - if "cloud_id" in filters: - query = query.filter_by(cloud_id=filters["cloud_id"]) - if "name" in filters: - query = query.filter_by(name=filters["name"]) - if "vars" in filters: - query = add_var_filters_to_query(query, models.Cell, filters) - - return _paginate(context, query, models.Cell, session, filters, - pagination_params) - - -def cells_get_by_id(context, cell_id): - """Get cell details given for a given cell id.""" - try: - query = model_query(context, models.Cell).\ - 
filter_by(id=cell_id) - return query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - - -def cells_create(context, values): - """Create a new cell.""" - session = get_session() - cell = models.Cell() - with session.begin(): - try: - cell.update(values) - cell.save(session) - except db_exc.DBDuplicateEntry: - raise exceptions.DuplicateCell() - return cell - - -def cells_update(context, cell_id, values): - """Update an existing cell.""" - session = get_session() - with session.begin(): - query = model_query(context, models.Cell, session=session, - project_only=True) - query = query.filter_by(id=cell_id) - cell_ref = query.with_for_update().one() - cell_ref.update(values) - cell_ref.save(session) - return cell_ref - - -def cells_delete(context, cell_id): - """Delete an existing cell.""" - session = get_session() - with session.begin(): - query = model_query(context, models.Cell, session=session, - project_only=True) - query = query.filter_by(id=cell_id) - query.delete() - - -def regions_get_all(context, filters, pagination_params): - """Get all available regions.""" - session = get_session() - query = model_query(context, models.Region, project_only=True, - session=session) - - if "vars" in filters: - query = add_var_filters_to_query(query, models.Region, filters) - if "cloud_id" in filters: - query = query.filter_by(cloud_id=filters["cloud_id"]) - - return _paginate(context, query, models.Region, session, filters, - pagination_params) - - -def regions_get_by_name(context, name): - """Get cell detail for the region with given name.""" - query = model_query(context, models.Region, project_only=True) - query = query.filter_by(name=name) - try: - return query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - - -def regions_get_by_id(context, region_id): - """Get cell detail for the region with given id.""" - query = model_query(context, models.Region, project_only=True) - query = query.filter_by(id=region_id) - try: - return 
query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - - -def regions_create(context, values): - """Create a new region.""" - session = get_session() - region = models.Region() - with session.begin(): - try: - region.update(values) - region.save(session) - except db_exc.DBDuplicateEntry: - raise exceptions.DuplicateRegion() - return region - - -def regions_update(context, region_id, values): - """Update an existing region.""" - session = get_session() - with session.begin(): - query = model_query(context, models.Region, session=session, - project_only=True) - query = query.filter_by(id=region_id) - region_ref = query.with_for_update().one() - region_ref.update(values) - region_ref.save(session) - return region_ref - - -def regions_delete(context, region_id): - """Delete an existing region.""" - session = get_session() - with session.begin(): - query = model_query(context, models.Region, session=session, - project_only=True) - query = query.filter_by(id=region_id) - query.delete() - return - - -def clouds_get_all(context, filters, pagination_params): - """Get all available clouds.""" - session = get_session() - query = model_query(context, models.Cloud, project_only=True, - session=session) - - if "vars" in filters: - query = add_var_filters_to_query(query, models.Cloud, filters) - - return _paginate(context, query, models.Cloud, session, filters, - pagination_params) - - -def clouds_get_by_name(context, name): - """Get cell detail for the cloud with given name.""" - query = model_query(context, models.Cloud, project_only=True) - query = query.filter_by(name=name) - try: - return query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - - -def clouds_get_by_id(context, cloud_id): - """Get cell detail for the cloud with given id.""" - query = model_query(context, models.Cloud, project_only=True) - query = query.filter_by(id=cloud_id) - try: - return query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - - -def 
clouds_create(context, values): - """Create a new cloud.""" - session = get_session() - cloud = models.Cloud() - with session.begin(): - try: - cloud.update(values) - cloud.save(session) - except db_exc.DBDuplicateEntry: - raise exceptions.DuplicateCloud() - return cloud - - -def clouds_update(context, cloud_id, values): - """Update an existing cloud.""" - session = get_session() - with session.begin(): - query = model_query(context, models.Cloud, session=session, - project_only=True) - query = query.filter_by(id=cloud_id) - cloud_ref = query.with_for_update().one() - cloud_ref.update(values) - cloud_ref.save(session) - return cloud_ref - - -def clouds_delete(context, cloud_id): - """Delete an existing cloud.""" - session = get_session() - with session.begin(): - query = model_query(context, models.Cloud, session=session, - project_only=True) - query = query.filter_by(id=cloud_id) - query.delete() - return - - -def hosts_get_all(context, filters, pagination_params): - """Get all hosts matching filters. - - :param filters: filters which contains different keys/values to match. - Supported filters are region_id, name, ip_address, id, cell, device_type, - label and vars. 
- """ - session = get_session() - host_devices = with_polymorphic(models.Device, [models.Host]) - query = model_query(context, host_devices, project_only=True, - session=session) - query = query.filter_by(type='hosts') - - if "region_id" in filters: - query = query.filter_by(region_id=filters["region_id"]) - if "cloud_id" in filters: - query = query.filter_by(cloud_id=filters["cloud_id"]) - if "name" in filters: - query = query.filter_by(name=filters["name"]) - if "ip_address" in filters: - query = query.filter_by(ip_address=filters["ip_address"]) - if "id" in filters: - query = query.filter_by(id=filters["id"]) - if "cell_id" in filters: - query = query.filter_by(cell_id=filters["cell_id"]) - if "device_type" in filters: - query = query.filter_by(device_type=filters["device_type"]) - if "label" in filters: - query = query.filter(models.Device.related_labels.any( - models.Label.label == filters["label"])) - if "vars" in filters: - query = add_var_filters_to_query(query, models.Device, filters) - - return _paginate(context, query, models.Host, session, filters, - pagination_params) - - -def hosts_get_by_id(context, host_id): - """Get details for the host with given id.""" - host_devices = with_polymorphic(models.Device, '*') - query = model_query(context, host_devices, project_only=True).\ - filter_by(id=host_id) - query = query.filter_by(type='hosts') - try: - result = query.one() - LOG.info("Result by host id %s" % result) - except sa_exc.NoResultFound: - LOG.error("No result found for host with id %s" % host_id) - raise exceptions.NotFound() - except Exception as err: - raise exceptions.UnknownException(message=err) - return result - - -def hosts_create(context, values): - """Create a new host.""" - session = get_session() - host = models.Host() - with session.begin(): - try: - host.update(values) - host.save(session) - except db_exc.DBDuplicateEntry: - raise exceptions.DuplicateDevice() - return host - - -def hosts_update(context, host_id, values): - """Update 
an existing host.""" - session = get_session() - with session.begin(): - host_devices = with_polymorphic(models.Device, '*') - query = model_query(context, host_devices, session=session, - project_only=True) - query = query.filter_by(id=host_id) - host_ref = query.with_for_update().one() - try: - host_ref.update(values) - except exceptions.ParentIDError as e: - raise exceptions.BadRequest(message=str(e)) - host_ref.save(session) - return host_ref - - -def hosts_delete(context, host_id): - """Delete an existing host.""" - session = get_session() - with session.begin(): - host_devices = with_polymorphic(models.Device, '*') - query = model_query(context, host_devices, session=session, - project_only=True) - query = query.filter_by(type='hosts') - query = query.filter_by(id=host_id) - query.delete() - return - - -def hosts_labels_update(context, host_id, labels): - """Update labels for host. Add the label if it is not present - in host labels list, otherwise do nothing.""" - return _device_labels_update(context, 'hosts', host_id, labels) - - -def hosts_labels_delete(context, host_id, labels): - """Delete labels from the host labels list if it matches - the given label in the query, otherwise do nothing.""" - return _device_labels_delete(context, 'hosts', host_id, labels) - - -@require_admin_context -def projects_get_all(context, filters, pagination_params): - """Get all the projects.""" - session = get_session() - query = model_query(context, models.Project, session=session) - if "vars" in filters: - query = add_var_filters_to_query(query, models.Project, filters) - return _paginate(context, query, models.Project, session, filters, - pagination_params) - - -@require_admin_context -def projects_get_by_name(context, project_name, filters, pagination_params): - """Get all projects that match the given name.""" - query = model_query(context, models.Project) - query = query.filter(models.Project.name.like(project_name)) - if "vars" in filters: - query = 
add_var_filters_to_query(query, models.Project, filters) - try: - return _paginate(context, query, models.Project, session, filters, - pagination_params) - except sa_exc.NoResultFound: - raise exceptions.NotFound() - except Exception as err: - raise exceptions.UnknownException(message=err) - - -@require_project_admin_context -def projects_get_by_id(context, project_id): - """Get project by its id.""" - query = model_query(context, models.Project) - query = query.filter_by(id=project_id) - try: - return query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - except Exception as err: - raise exceptions.UnknownException(message=err) - - -@require_admin_context -def projects_create(context, values): - """Create a new project with given values.""" - session = get_session() - project = models.Project() - if not values.get('id'): - values['id'] = uuid.uuid4() - with session.begin(): - project.update(values) - project.save(session) - return project - - -@require_admin_context -def projects_delete(context, project_id): - """Delete an existing project given by its id.""" - session = get_session() - with session.begin(): - query = model_query(context, models.Project, session=session) - query = query.filter_by(id=project_id) - query.delete() - - -@require_project_admin_context -def users_get_all(context, filters, pagination_params): - """Get all the users.""" - session = get_session() - if is_admin_context(context): - LOG.info("Getting all users as root user") - query = model_query(context, models.User, session=session) - else: - LOG.info("Getting all users as project admin user") - query = model_query(context, models.User, project_only=True, - session=session) - query = query.filter_by(project_id=context.tenant) - - return _paginate(context, query, models.User, session, filters, - pagination_params) - - -@require_project_admin_context -def users_get_by_name(context, user_name, filters, pagination_params): - """Get all users that match the given username.""" 
- session = get_session() - if is_admin_context(context): - query = model_query(context, models.User, session=session) - else: - query = model_query(context, models.User, project_only=True, - session=session) - - query = query.filter_by(username=user_name) - return _paginate(context, query, models.User, session, filters, - pagination_params) - - -@require_project_admin_context -def users_get_by_id(context, user_id): - """Get user by its id.""" - if is_admin_context(context): - query = model_query(context, models.User) - else: - query = model_query(context, models.User, project_only=True) - - query = query.filter_by(id=user_id) - try: - return query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - - -@require_project_admin_context -def users_create(context, values): - """Create a new user with given values.""" - session = get_session() - user = models.User() - with session.begin(): - user.update(values) - user.save(session) - return user - - -@require_project_admin_context -def users_delete(context, user_id): - """Delete an existing user given by its id.""" - LOG.info("Deleting user with id %s" % user_id) - session = get_session() - with session.begin(): - query = model_query(context, models.User, session=session) - query = query.filter_by(id=user_id) - query.delete() - return - - -def networks_get_all(context, filters, pagination_params): - """Get all networks.""" - session = get_session() - query = model_query(context, models.Network, project_only=True, - session=session) - - if "region_id" in filters: - query = query.filter_by(region_id=filters["region_id"]) - if "id" in filters: - query = query.filter_by(id=filters["id"]) - if "network_type" in filters: - query = query.filter_by(network_type=filters["network_type"]) - if "cell_id" in filters: - query = query.filter_by(cell_id=filters["cell_id"]) - if "name" in filters: - query = query.filter_by(name=filters["name"]) - if "vars" in filters: - query = add_var_filters_to_query(query, 
models.Network, filters) - - return _paginate(context, query, models.Network, session, filters, - pagination_params) - - -def networks_get_by_id(context, network_id): - """Get a given network by its id.""" - query = model_query(context, models.Network, project_only=True) - query = query.filter_by(id=network_id) - try: - result = query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - - return result - - -def networks_create(context, values): - """Create a new network.""" - session = get_session() - network = models.Network() - with session.begin(): - try: - network.update(values) - network.save(session) - except db_exc.DBDuplicateEntry: - raise exceptions.DuplicateNetwork() - return network - - -def networks_update(context, network_id, values): - """Update an existing network.""" - session = get_session() - with session.begin(): - query = model_query(context, models.Network, session=session, - project_only=True) - query = query.filter_by(id=network_id) - network_ref = query.with_for_update().one() - network_ref.update(values) - network_ref.save(session) - return network_ref - - -def networks_delete(context, network_id): - """Delete existing network.""" - session = get_session() - with session.begin(): - query = model_query(context, models.Network, session=session, - project_only=True) - query = query.filter_by(id=network_id) - query.delete() - return - - -def network_devices_get_all(context, filters, pagination_params): - """Get all network devices.""" - session = get_session() - devices = with_polymorphic(models.Device, [models.NetworkDevice]) - query = model_query(context, devices, project_only=True, session=session) - query = query.filter_by(type='network_devices') - - if "region_id" in filters: - query = query.filter_by(region_id=filters["region_id"]) - if "cloud_id" in filters: - query = query.filter_by(cloud_id=filters["cloud_id"]) - if "name" in filters: - query = query.filter_by(name=filters["name"]) - if "ip_address" in filters: - query 
= query.filter_by(ip_address=filters["ip_address"]) - if "id" in filters: - query = query.filter_by(id=filters["id"]) - if "cell_id" in filters: - query = query.filter_by(cell_id=filters["cell_id"]) - if "device_type" in filters: - query = query.filter_by(device_type=filters["device_type"]) - if "vars" in filters: - query = add_var_filters_to_query(query, models.Device, filters) - - return _paginate(context, query, models.Device, session, filters, - pagination_params) - - -def network_devices_get_by_id(context, network_device_id): - """Get a given network device by its id.""" - devices = with_polymorphic(models.Device, [models.NetworkDevice]) - query = model_query(context, devices, project_only=True) - query = query.filter_by(type='network_devices') - query = query.filter_by(id=network_device_id) - try: - result = query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - - return result - - -def network_devices_create(context, values): - """Create a new network device.""" - session = get_session() - device = models.NetworkDevice() - with session.begin(): - device.update(values) - device.save(session) - return device - - -def network_devices_update(context, network_device_id, values): - """Update existing network device""" - session = get_session() - with session.begin(): - device = with_polymorphic(models.Device, '*') - query = model_query(context, device, session=session, - project_only=True) - query = query.filter_by(type='network_devices') - query = query.filter_by(id=network_device_id) - network_device_ref = query.with_for_update().one() - try: - network_device_ref.update(values) - except exceptions.ParentIDError as e: - raise exceptions.BadRequest(message=str(e)) - network_device_ref.save(session) - return network_device_ref - - -def network_devices_delete(context, network_device_id): - """Delete existing network device.""" - session = get_session() - with session.begin(): - device = with_polymorphic(models.Device, '*') - query = 
model_query(context, device, session=session, - project_only=True) - query = query.filter_by(type='network_devices') - query = query.filter_by(id=network_device_id) - query.delete() - - -def network_devices_labels_update(context, device_id, labels): - """Update labels for a network device. Add the label if it is not present - in host labels list, otherwise do nothing.""" - return _device_labels_update(context, 'network_devices', device_id, labels) - - -def network_devices_labels_delete(context, device_id, labels): - """Delete labels from the network device labels list if it matches - the given label in the query, otherwise do nothing.""" - return _device_labels_delete(context, 'network_devices', device_id, labels) - - -def network_interfaces_get_all(context, filters, pagination_params): - """Get all network interfaces.""" - session = get_session() - query = model_query(context, models.NetworkInterface, project_only=True, - session=session) - - if "device_id" in filters: - query = query.filter_by(device_id=filters["device_id"]) - if "id" in filters: - query = query.filter_by(id=filters["id"]) - if "ip_address" in filters: - query = query.filter_by(ip_address=filters["ip_address"]) - if "interface_type" in filters: - query = query.filter_by(interface_type=filters["interface_type"]) - - return _paginate(context, query, models.NetworkInterface, session, - filters, pagination_params) - - -def network_interfaces_get_by_id(context, interface_id): - """Get a given network interface by its id.""" - query = model_query(context, models.NetworkInterface, project_only=True) - query = query.filter_by(id=interface_id) - try: - result = query.one() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - - return result - - -def network_interfaces_create(context, values): - """Create a new network interface.""" - session = get_session() - interface = models.NetworkInterface() - with session.begin(): - interface.update(values) - interface.save(session) - return interface - - 
-def network_interfaces_update(context, interface_id, values): - """Update an existing network interface.""" - session = get_session() - with session.begin(): - query = model_query(context, models.NetworkInterface, session=session, - project_only=True) - query = query.filter_by(id=interface_id) - network_interface_ref = query.with_for_update().one() - network_interface_ref.update(values) - network_interface_ref.save(session) - return network_interface_ref - - -def network_interfaces_delete(context, interface_id): - """Delete existing network interface.""" - session = get_session() - with session.begin(): - query = model_query(context, models.NetworkInterface, session=session, - project_only=True) - query = query.filter_by(id=interface_id) - query.delete() - - -def _marker_from(context, session, model, marker, project_only): - if marker is None: - return None - - query = model_query(context, model, session=session, - project_only=project_only) - return query.filter_by(id=marker).one() - - -def _get_previous(query, model, current_marker, page_size, filters): - # NOTE(sigmavirus24): To get the previous items based on the existing - # filters, we need only reverse the direction that the user requested. - original_sort_dir = filters['sort_dir'] - sort_dir = 'desc' - if original_sort_dir == 'desc': - sort_dir = 'asc' - - results = db_utils.paginate_query( - query, model, - limit=page_size, - sort_keys=filters['sort_keys'], - sort_dir=sort_dir, - marker=current_marker, - ).all() - - if not results: - return None - - return results[-1].id - - -def _link_params_for(query, model, filters, pagination_params, - current_marker, current_results): - links = {} - # We can discern our base parameters for our links - base_parameters = {} - for (key, value) in filters.items(): - # NOTE(thomasem): Sometimes the filters that are passed in will include - # a None value from the schema. 
This causes it to not get included - # in the link, since a None value indicates you did not include a value - # for that parameter in the original call. - if value is None: - continue - # This takes care of things like sort_keys which may have multiple - # values - if isinstance(value, list): - value = ','.join(value) - base_parameters[key] = value - base_parameters['limit'] = pagination_params['limit'] - generate_links = ('first', 'self') - - if current_results: - next_marker = current_results[-1] - # If there are results to return, there may be a next link to follow - generate_links += ('next',) - - # We start our links dictionary with some basics - for relation in generate_links: - params = base_parameters.copy() - if relation == 'self': - if pagination_params['marker'] is not None: - params['marker'] = pagination_params['marker'] - elif relation == 'next': - params['marker'] = next_marker.id - links[relation] = params - - params = base_parameters.copy() - previous_marker = None - if current_marker is not None: - previous_marker = _get_previous( - query, model, current_marker, pagination_params['limit'], filters, - ) - if previous_marker is not None: - params['marker'] = previous_marker - links['prev'] = params - return links - - -def _paginate(context, query, model, session, filters, pagination_params, - project_only=False): - # NOTE(sigmavirus24) Retrieve the instance of the model represented by the - # marker. 
- try: - marker = _marker_from(context, session, model, - pagination_params['marker'], - project_only) - except sa_exc.NoResultFound: - raise exceptions.BadRequest( - message='Marker "{}" does not exist'.format( - pagination_params['marker'] - ) - ) - except Exception as err: - raise exceptions.UnknownException(message=err) - - filters.setdefault('sort_keys', ['created_at', 'id']) - filters.setdefault('sort_dir', 'asc') - # Retrieve the results based on the marker and the limit - try: - results = db_utils.paginate_query( - query, model, - limit=pagination_params['limit'], - sort_keys=filters['sort_keys'], - sort_dir=filters['sort_dir'], - marker=marker, - ).all() - except sa_exc.NoResultFound: - raise exceptions.NotFound() - except db_exc.InvalidSortKey as invalid_key: - raise exceptions.BadRequest( - message='"{}" is an invalid sort key'.format(invalid_key.key) - ) - except Exception as err: - raise exceptions.UnknownException(message=err) - - try: - links = _link_params_for( - query, model, filters, pagination_params, marker, results, - ) - except Exception as err: - raise exceptions.UnknownException(message=err) - - return results, links diff --git a/craton/db/sqlalchemy/migration.py b/craton/db/sqlalchemy/migration.py deleted file mode 100644 index 4784efd..0000000 --- a/craton/db/sqlalchemy/migration.py +++ /dev/null @@ -1,114 +0,0 @@ -import alembic -import os -import uuid - -from alembic import config as alembic_config -import alembic.migration as alembic_migration -from sqlalchemy import create_engine -from sqlalchemy import exc -from sqlalchemy.orm import sessionmaker -import sqlalchemy.orm.exc as sa_exc -from oslo_db.sqlalchemy import enginefacade - -from craton.api.v1.resources import utils -from craton.db.sqlalchemy import models - - -def _alembic_config(): - path = os.path.join(os.path.dirname(__file__), 'alembic.ini') - config = alembic_config.Config(path) - return config - - -def version(config=None, engine=None): - """Current database version.""" - 
if engine is None: - engine = enginefacade.get_legacy_facade().get_engine() - with engine.connect() as conn: - context = alembic_migration.MigrationContext.configure(conn) - return context.get_current_revision() - - -def upgrade(revision, config=None): - """Used for upgrading database. - :param version: Desired database version - """ - revision = revision or 'head' - config = config or _alembic_config() - - alembic.command.upgrade(config, revision or 'head') - - -def stamp(revision, config=None): - """Stamps database with provided revision. - Don't run any migrations. - :param revision: Should match one from repository or head - to stamp - database with most recent revision - """ - config = config or _alembic_config() - return alembic.command.stamp(config, revision=revision) - - -def revision(message=None, autogenerate=False, config=None): - """Creates template for migration. - :param message: Text that will be used for migration title - :param autogenerate: If True - generates diff based on current database - state - """ - config = config or _alembic_config() - return alembic.command.revision(config, message=message, - autogenerate=autogenerate) - - -def create_bootstrap_project(name, project_id=None, db_uri=None): - """Creates a new project. - :param name: Name of the new project - """ - if not project_id: - project_id = str(uuid.uuid4()) - engine = create_engine(db_uri) - Session = sessionmaker(bind=engine) - session = Session() - project = models.Project(name=name, - id=project_id) - - try: - project = session.query(models.Project).filter_by(name=name).one() - except sa_exc.NoResultFound: - session.add(project) - session.commit() - - return project - - -def create_bootstrap_user(project_id, username, db_uri=None): - """Creates a new project. 
- :param username: Username for new user - :param project_id: Project ID for the user - """ - engine = create_engine(db_uri) - Session = sessionmaker(bind=engine) - session = Session() - - api_key = utils.gen_api_key() - - user = models.User(project_id=project_id, - username=username, - api_key=api_key, - is_admin=True, - is_root=True) - try: - session.add(user) - session.commit() - except exc.IntegrityError as err: - if err.orig.args[0] == 1062: - # NOTE(sulo): 1062 is the normal sql duplicate error code - # also see pymysql/constants/ER.py#L65 - session.rollback() - user = session.query(models.User).filter_by(username=username) - user = user.filter_by(project_id=project_id).one() - return user - else: - raise - - return user diff --git a/craton/db/sqlalchemy/models.py b/craton/db/sqlalchemy/models.py deleted file mode 100644 index 38adbc4..0000000 --- a/craton/db/sqlalchemy/models.py +++ /dev/null @@ -1,553 +0,0 @@ -"""Models inventory, as defined using SQLAlchemy ORM - -Craton uses the following related aspects of inventory: - -* Device inventory, with devices are further organized by region, - cell, and labels. Variables are associated with all of these - entities, with the ability to override via resolution and to track - with blaming. This in terms forms the foundation of an *inventory - fabric*, which is implemented above this level. - -* Workflows are run against this inventory, taking in account the - variable configuration; as well as any specifics baked into the - workflow itself. 
- -""" - -from collections import ChainMap, deque, OrderedDict -import itertools - -from oslo_db.sqlalchemy import models -from sqlalchemy import ( - Boolean, Column, ForeignKey, Integer, String, Text, UniqueConstraint, JSON) -from sqlalchemy.ext.associationproxy import association_proxy -from sqlalchemy.ext.declarative import declarative_base, declared_attr -from sqlalchemy.ext.declarative.api import _declarative_constructor -from sqlalchemy.orm import backref, object_mapper, relationship, validates -from sqlalchemy.orm.collections import attribute_mapped_collection -from sqlalchemy_utils.types.ip_address import IPAddressType -from sqlalchemy_utils.types.uuid import UUIDType - -from craton import exceptions -from craton.db.api import Blame - - -# TODO(jimbaker) set up table args for a given database/storage -# engine, as configured. See -# https://github.com/rackerlabs/craton/issues/19 - - -class CratonBase(models.ModelBase, models.TimestampMixin): - def __repr__(self): - mapper = object_mapper(self) - cols = getattr(self, '_repr_columns', mapper.primary_key) - items = [(p.key, getattr(self, p.key)) - for p in [ - mapper.get_property_by_column(c) for c in cols]] - return "{0}({1})".format( - self.__class__.__name__, - ', '.join(['{0}={1!r}'.format(*item) for item in items])) - - -def _variable_mixin_aware_constructor(self, **kwargs): - # The standard default for the underlying relationship for - # variables sets it to None, which means it cannot directly be - # used as a mappable collection. Cure the problem accordingly with - # a different default. - if isinstance(self, VariableMixin): - kwargs.setdefault('variables', {}) - return _declarative_constructor(self, **kwargs) - - -Base = declarative_base( - cls=CratonBase, constructor=_variable_mixin_aware_constructor) - - -class VariableAssociation(Base): - """Associates a collection of Variable key-value objects - with a particular parent. 
- - """ - __tablename__ = "variable_association" - - id = Column(Integer, primary_key=True) - discriminator = Column(String(50), nullable=False) - """Refers to the type of parent, such as 'cell' or 'device'""" - - variables = relationship( - 'Variable', - collection_class=attribute_mapped_collection('key'), - back_populates='association', - cascade='all, delete-orphan', lazy='joined', - ) - - def _variable_creator(key, value): - # Necessary to create a single key/value setting, even once - # the corresponding variable association has been setup - return Variable(key=key, value=value) - - values = association_proxy( - 'variables', 'value', creator=_variable_creator) - - __mapper_args__ = { - 'polymorphic_on': discriminator, - } - - -class Variable(Base): - """The Variable class. - - This represents all variable records in a single table. - """ - __tablename__ = 'variables' - association_id = Column( - Integer, - ForeignKey(VariableAssociation.id, - name='fk_variables_variable_association'), - primary_key=True) - # Use "key_", "value_" to avoid the use of reserved keywords in - # MySQL. This difference in naming is only visible in the use of - # raw SQL. - key = Column('key_', String(255), primary_key=True) - value = Column('value_', JSON) - association = relationship( - VariableAssociation, back_populates='variables', - ) - parent = association_proxy('association', 'parent') - - def __repr__(self): - return '%s(key=%r, value=%r)' % \ - (self.__class__.__name__, self.key, self.value) - - -# The VariableMixin mixin is adapted from this example code: -# http://docs.sqlalchemy.org/en/latest/_modules/examples/generic_associations/discriminator_on_association.html -# This blog post goes into more details about the underlying modeling: -# http://techspot.zzzeek.org/2007/05/29/polymorphic-associations-with-sqlalchemy/ - -class VariableMixin(object): - """VariableMixin mixin, creates a relationship to - the variable_association table for each parent. 
- - """ - - @declared_attr - def variable_association_id(cls): - return Column( - Integer, - ForeignKey(VariableAssociation.id, - name='fk_%ss_variable_association' % - cls.__name__.lower())) - - @declared_attr - def variable_association(cls): - name = cls.__name__ - discriminator = name.lower() - - # Defines a polymorphic class to distinguish variables stored - # for regions, cells, etc. - cls.variable_assoc_cls = assoc_cls = type( - "%sVariableAssociation" % name, - (VariableAssociation,), - { - '__tablename__': None, # because mapping into a shared table - '__mapper_args__': { - 'polymorphic_identity': discriminator - } - }) - - def _assoc_creator(kv): - assoc = assoc_cls() - for key, value in kv.items(): - assoc.variables[key] = Variable(key=key, value=value) - return assoc - - cls._variables = association_proxy( - 'variable_association', 'variables', creator=_assoc_creator) - - # Using a composite associative proxy here enables returning the - # underlying values for a given key, as opposed to the - # Variable object; we need both. - cls.variables = association_proxy( - 'variable_association', 'values', creator=_assoc_creator) - - def with_characteristic(self, key, value): - return self._variables.any(key=key, value=value) - - cls.with_characteristic = classmethod(with_characteristic) - - rel = relationship( - assoc_cls, - collection_class=attribute_mapped_collection('key'), - cascade='all, delete-orphan', lazy='joined', - single_parent=True, - backref=backref('parent', uselist=False)) - - return rel - - # For resolution ordering, the default is to just include - # self. Override as desired for other resolution policy. 
- - @property - def resolution_order(self): - return [self] - - @property - def resolution_order_variables(self): - return [obj.variables for obj in self.resolution_order] - - @property - def resolved(self): - """Provides a mapping that uses scope resolution for variables""" - return ChainMap(*self.resolution_order_variables) - - def blame(self, keys=None): - """Determines the sources of how variables have been set. - :param keys: keys to check sourcing, or all keys if None - - Returns the (source, variable) in a named tuple; note that - variable contains certain audit/governance information - (created_at, modified_at). - - TODO(jimbaker) further extend schema on mixed-in variable tables - to capture additional governance, such as user who set the key; - this will then transparently become available in the blame. - """ - - if keys is None: - keys = self.resolved.keys() - blamed = {} - for key in keys: - for source in self.resolution_order: - try: - blamed[key] = Blame(source, source._variables[key]) - break - except KeyError: - pass - return blamed - - -class Project(Base, VariableMixin): - """Supports multitenancy for all other schema elements.""" - __tablename__ = 'projects' - id = Column(UUIDType(binary=False), primary_key=True) - name = Column(String(255)) - _repr_columns = [id, name] - - # TODO(jimbaker) we will surely need to define more columns, but - # this suffices to define multitenancy for MVP - - # one-to-many relationship with the following objects - clouds = relationship('Cloud', back_populates='project') - regions = relationship('Region', back_populates='project') - cells = relationship('Cell', back_populates='project') - devices = relationship('Device', back_populates='project') - users = relationship('User', back_populates='project') - networks = relationship('Network', back_populates='project') - interfaces = relationship('NetworkInterface', back_populates='project') - - -class User(Base, VariableMixin): - __tablename__ = 'users' - __table_args__ 
= ( - UniqueConstraint("username", "project_id", - name="uq_user0username0project"), - ) - id = Column(Integer, primary_key=True) - project_id = Column( - UUIDType(binary=False), ForeignKey('projects.id'), index=True, - nullable=False) - username = Column(String(255)) - api_key = Column(String(36)) - # root = craton admin that can create other projects/users - is_root = Column(Boolean, default=False) - # admin = project context admin - is_admin = Column(Boolean, default=False) - _repr_columns = [id, username] - - project = relationship('Project', back_populates='users') - - -class Cloud(Base, VariableMixin): - __tablename__ = 'clouds' - __table_args__ = ( - UniqueConstraint("project_id", "name", - name="uq_cloud0projectid0name"), - ) - id = Column(Integer, primary_key=True) - project_id = Column( - UUIDType(binary=False), ForeignKey('projects.id'), index=True, - nullable=False) - name = Column(String(255)) - note = Column(Text) - _repr_columns = [id, name] - - project = relationship('Project', back_populates='clouds') - regions = relationship('Region', back_populates='cloud') - cells = relationship('Cell', back_populates='cloud') - devices = relationship('Device', back_populates='cloud') - networks = relationship('Network', back_populates='cloud') - - -class Region(Base, VariableMixin): - __tablename__ = 'regions' - __table_args__ = ( - UniqueConstraint("cloud_id", "name", - name="uq_region0cloudid0name"), - ) - id = Column(Integer, primary_key=True) - project_id = Column( - UUIDType(binary=False), ForeignKey('projects.id'), index=True, - nullable=False) - cloud_id = Column( - Integer, ForeignKey('clouds.id'), index=True, nullable=False) - name = Column(String(255)) - note = Column(Text) - _repr_columns = [id, name] - - project = relationship('Project', back_populates='regions') - cloud = relationship('Cloud', back_populates='regions') - cells = relationship('Cell', back_populates='region') - devices = relationship('Device', back_populates='region') - networks = 
relationship('Network', back_populates='region') - - @property - def resolution_order(self): - return list(itertools.chain( - [self], - [self.cloud], - [self.project])) - - -class Cell(Base, VariableMixin): - __tablename__ = 'cells' - __table_args__ = ( - UniqueConstraint("region_id", "name", - name="uq_cell0regionid0name"), - ) - id = Column(Integer, primary_key=True) - region_id = Column( - Integer, ForeignKey('regions.id'), index=True, nullable=False) - cloud_id = Column( - Integer, ForeignKey('clouds.id'), index=True, nullable=False) - project_id = Column( - UUIDType(binary=False), ForeignKey('projects.id'), index=True, - nullable=False) - name = Column(String(255)) - note = Column(Text) - _repr_columns = [id, name] - - project = relationship('Project', back_populates='cells') - cloud = relationship('Cloud', back_populates='cells') - region = relationship('Region', back_populates='cells') - devices = relationship('Device', back_populates='cell') - networks = relationship('Network', back_populates='cell') - - @property - def resolution_order(self): - return list(itertools.chain( - [self], - [self.region], - [self.cloud], - [self.project])) - - -class Device(Base, VariableMixin): - """Base class for all devices.""" - - __tablename__ = 'devices' - __table_args__ = ( - UniqueConstraint("region_id", "name", - name="uq_device0regionid0name"), - ) - id = Column(Integer, primary_key=True) - type = Column(String(50)) # discriminant for joined table inheritance - name = Column(String(255), nullable=False) - cloud_id = Column( - Integer, ForeignKey('clouds.id'), index=True, nullable=False) - region_id = Column( - Integer, ForeignKey('regions.id'), index=True, nullable=False) - cell_id = Column( - Integer, ForeignKey('cells.id'), index=True, nullable=True) - project_id = Column( - UUIDType(binary=False), ForeignKey('projects.id'), index=True, - nullable=False) - parent_id = Column(Integer, ForeignKey('devices.id')) - ip_address = Column(IPAddressType, nullable=False) - 
device_type = Column(String(255), nullable=False) - # TODO(jimbaker) generalize `note` for supporting governance - active = Column(Boolean, default=True) - note = Column(Text) - - _repr_columns = [id, name] - - project = relationship('Project', back_populates='devices') - cloud = relationship('Cloud', back_populates='devices') - region = relationship('Region', back_populates='devices') - cell = relationship('Cell', back_populates='devices') - related_labels = relationship( - 'Label', back_populates='device', collection_class=set, - cascade='all, delete-orphan', lazy='joined') - labels = association_proxy('related_labels', 'label') - interfaces = relationship('NetworkInterface', back_populates='device') - children = relationship( - 'Device', backref=backref('parent', remote_side=[id])) - - @validates("parent_id") - def validate_parent_id(self, _, parent_id): - if parent_id is None: - return parent_id - elif parent_id == self.id: - msg = ( - "A device cannot be its own parent. The id for '{name}'" - " cannot be used as its parent_id." - ).format(name=self.name) - raise exceptions.ParentIDError(msg) - elif parent_id in (descendant.id for descendant in self.descendants): - msg = ( - "A device cannot have a descendant as its parent. The" - " parent_id for '{name}' cannot be set to the id '{bad_id}'." 
- ).format(name=self.name, bad_id=parent_id) - raise exceptions.ParentIDError(msg) - else: - return parent_id - - @property - def ancestors(self): - lineage = [] - ancestor = self.parent - while ancestor: - lineage.append(ancestor) - ancestor = ancestor.parent - return lineage - - @property - def descendants(self): - marked = OrderedDict() - descent = deque(self.children) - while descent: - descendant = descent.popleft() - marked[descendant] = True - descent.extend( - child for child in descendant.children if child not in marked - ) - return list(marked.keys()) - - @property - def resolution_order(self): - return list(itertools.chain( - [self], - self.ancestors, - [self.cell] if self.cell else [], - [self.region], - [self.cloud], - [self.project])) - - __mapper_args__ = { - 'polymorphic_on': type, - 'polymorphic_identity': 'devices', - 'with_polymorphic': '*' - } - - -class Host(Device): - __tablename__ = 'hosts' - id = Column(Integer, ForeignKey('devices.id'), primary_key=True) - hostname = Device.name - - __mapper_args__ = { - 'polymorphic_identity': 'hosts', - 'inherit_condition': (id == Device.id) - } - - -class NetworkInterface(Base, VariableMixin): - __tablename__ = 'network_interfaces' - __table_args__ = ( - UniqueConstraint("device_id", "name", - name="uq_netinter0deviceid0name"), - ) - id = Column(Integer, primary_key=True) - name = Column(String(255), nullable=True) - interface_type = Column(String(255), nullable=True) - vlan_id = Column(Integer, nullable=True) - port = Column(Integer, nullable=True) - vlan = Column(String(255), nullable=True) - duplex = Column(String(255), nullable=True) - speed = Column(String(255), nullable=True) - link = Column(String(255), nullable=True) - cdp = Column(String(255), nullable=True) - security = Column(String(255), nullable=True) - ip_address = Column(IPAddressType, nullable=False) - project_id = Column(UUIDType(binary=False), ForeignKey('projects.id'), - index=True, nullable=False) - device_id = Column(Integer, 
ForeignKey('devices.id')) - network_id = Column(Integer, ForeignKey('networks.id'), nullable=True) - - project = relationship( - 'Project', back_populates='interfaces', cascade='all', lazy='joined') - network = relationship( - 'Network', back_populates='interfaces', cascade='all', lazy='joined') - device = relationship( - 'Device', back_populates='interfaces', cascade='all', lazy='joined') - - -class Network(Base, VariableMixin): - __tablename__ = 'networks' - __table_args__ = ( - UniqueConstraint("name", "project_id", "region_id", - name="uq_name0projectid0regionid"), - ) - id = Column(Integer, primary_key=True) - name = Column(String(255), nullable=True) - cidr = Column(String(255), nullable=True) - gateway = Column(String(255), nullable=True) - netmask = Column(String(255), nullable=True) - ip_block_type = Column(String(255), nullable=True) - nss = Column(String(255), nullable=True) - cloud_id = Column( - Integer, ForeignKey('clouds.id'), index=True, nullable=False) - region_id = Column( - Integer, ForeignKey('regions.id'), index=True, nullable=False) - cell_id = Column( - Integer, ForeignKey('cells.id'), index=True, nullable=True) - project_id = Column( - UUIDType(binary=False), ForeignKey('projects.id'), index=True, - nullable=False) - - project = relationship('Project', back_populates='networks') - cloud = relationship('Cloud', back_populates='networks') - region = relationship('Region', back_populates='networks') - cell = relationship('Cell', back_populates='networks') - interfaces = relationship('NetworkInterface', back_populates='network') - - -class NetworkDevice(Device): - __tablename__ = 'network_devices' - id = Column(Integer, ForeignKey('devices.id'), primary_key=True) - hostname = Device.name - # network device specific properties - model_name = Column(String(255), nullable=True) - os_version = Column(String(255), nullable=True) - vlans = Column(JSON) - - __mapper_args__ = { - 'polymorphic_identity': 'network_devices', - 'inherit_condition': (id == 
Device.id) - } - - -class Label(Base): - """Models arbitrary labeling (aka tagging) of devices.""" - __tablename__ = 'labels' - device_id = Column( - Integer, - ForeignKey(Device.id, name='fk_labels_devices'), - primary_key=True) - label = Column(String(255), primary_key=True) - _repr_columns = [device_id, label] - - def __init__(self, label): - self.label = label - - device = relationship("Device", back_populates="related_labels") diff --git a/craton/exceptions.py b/craton/exceptions.py deleted file mode 100644 index f03e2ef..0000000 --- a/craton/exceptions.py +++ /dev/null @@ -1,100 +0,0 @@ -"""Exceptions for Craton Inventory system.""" -from oslo_log import log as logging - - -LOG = logging.getLogger(__name__) - - -class Base(Exception): - """Base Exception for Craton Inventory.""" - code = 500 - message = "An unknown exception occurred" - - def __str__(self): - return self.message - - def __init__(self, code=None, message=None, **kwargs): - if code: - self.code = code - - if not message: - try: - message = self.msg % kwargs - except Exception: - LOG.exception('Error in formatting exception message') - message = self.msg - - self.message = message - - super(Base, self).__init__( - '%s: %s' % (self.code, self.message)) - - -class DuplicateCloud(Base): - code = 409 - msg = "A cloud with the given name already exists." - - -class DuplicateRegion(Base): - code = 409 - msg = "A region with the given name already exists." - - -class DuplicateCell(Base): - code = 409 - msg = "A cell with the given name already exists." - - -class DuplicateDevice(Base): - code = 409 - msg = "A device with the given name already exists." - - -class DuplicateNetwork(Base): - code = 409 - msg = "Network with the given name already exists in this region." - - -class NetworkNotFound(Base): - code = 404 - msg = "Network not found for ID %(id)s." - - -class DeviceNotFound(Base): - code = 404 - msg = "%(device_type)s device not found for ID %(id)s." 
- - -class AuthenticationError(Base): - code = 401 - msg = "The request could not be authenticated." - - -class AdminRequired(Base): - code = 401 - msg = "This action requires the 'admin' role" - - -class BadRequest(Base): - code = 400 - - -class InvalidJSONPath(BadRequest): - msg = "The query contains an invalid JSON Path expression." - - -class InvalidJSONValue(BadRequest): - msg = "An invalid JSON value was specified." - - -class NotFound(Base): - code = 404 - msg = "Not Found" - - -class UnknownException(Base): - code = 500 - - -class ParentIDError(ValueError): - pass diff --git a/craton/tests/__init__.py b/craton/tests/__init__.py deleted file mode 100644 index 400b935..0000000 --- a/craton/tests/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -import mock -import testtools -from oslo_middleware import base - - -class TestContext(base.Middleware): - def __init__(self, auth_token=None, user=None, tenant=None, - is_admin=False, is_admin_project=False): - self.auth_token = auth_token - self.user = user - self.tenant = tenant - self.is_admin = is_admin - self.is_admin_project = is_admin_project - - -def make_context(*args, **kwargs): - return TestContext(*args, **kwargs) - - -class TestCase(testtools.TestCase): - - def setUp(self): - super(TestCase, self).setUp() - self.addCleanup(mock.patch.stopall) - - self.context = make_context(auth_token='fake-token', - user='fake-user', - tenant='fake-tenant', - is_admin=True, - is_admin_project=True) diff --git a/craton/tests/functional/__init__.py b/craton/tests/functional/__init__.py deleted file mode 100644 index 17335dd..0000000 --- a/craton/tests/functional/__init__.py +++ /dev/null @@ -1,493 +0,0 @@ -import contextlib -import copy -import json -import threading - -import docker -from oslo_log import log as logging -from oslo_utils import uuidutils -import requests -from retrying import retry -from sqlalchemy import create_engine -from sqlalchemy import MetaData -from sqlalchemy.orm import sessionmaker -import testtools - 
-from craton.db.sqlalchemy import models - - -LOG = logging.getLogger(__name__) - - -FAKE_DATA_GEN_USERNAME = 'demo' -FAKE_DATA_GEN_TOKEN = 'demo' -FAKE_DATA_GEN_PROJECT_ID = 'b9f10eca66ac4c279c139d01e65f96b4' - -FAKE_DATA_GEN_BOOTSTRAP_USERNAME = 'bootstrap' -FAKE_DATA_GEN_BOOTSTRAP_TOKEN = 'bootstrap' - -HEADER_TOKEN = 'X-Auth-Token' -HEADER_USERNAME = 'X-Auth-User' -HEADER_PROJECT = 'X-Auth-Project' - - -def get_root_headers(): - return { - HEADER_USERNAME: FAKE_DATA_GEN_BOOTSTRAP_USERNAME, - HEADER_TOKEN: FAKE_DATA_GEN_BOOTSTRAP_TOKEN - } - - -class DockerSetup(threading.Thread): - - def __init__(self): - self.container = None - self.container_is_ready = threading.Event() - self.error = None - self.client = None - self.repo_dir = './' - super(DockerSetup, self).__init__() - - def run(self): - """Build a docker container from the given Dockerfile and start - the container in a separate thread.""" - try: - self.client = docker.Client(version='auto') - is_ok = self.client.ping() - if is_ok != 'OK': - msg = 'Docker daemon ping failed.' - self.error = msg - LOG.error(self.error) - self.container_is_ready.set() - return - except Exception as err: - self.error = err - LOG.error(self.error) - self.container_is_ready.set() - return - - # Create Docker image for Craton - build_output = self.client.build(path=self.repo_dir, - tag='craton-functional-testing-api', - dockerfile='Dockerfile', - pull=True, - forcerm=True) - LOG.debug(build_output) - output_last_line = "" - for output_last_line in build_output: - pass - - message = output_last_line.decode("utf-8") - if "Successfully built" not in message: - msg = 'Failed to build docker image.' 
- self.error = msg - self.container_is_ready.set() - return - - # create and start the container - container_tag = 'craton-functional-testing-api' - self.container = self.client.create_container(container_tag) - self.client.start(self.container) - self.container_data = self.client.inspect_container(self.container) - if self.container_data['State']['Status'] != 'running': - msg = 'Container is not running.' - self.error = msg - self.container_is_ready.set() - return - - self.container_is_ready.set() - - def stop(self): - """Stop a running container.""" - if self.container is not None: - self.client.stop(self.container, timeout=30) - - def remove(self): - """Remove/Delete a stopped container.""" - if self.container is not None: - self.client.remove_container(self.container) - - def remove_image(self): - """Remove the image we created.""" - if self.client: - self.client.remove_image('craton-functional-testing-api') - - -@retry(wait_fixed=1000, stop_max_attempt_number=20) -def ensure_running_endpoint(container_data): - service_ip = container_data['NetworkSettings']['IPAddress'] - url = 'http://{}:7780/v1'.format(service_ip) - headers = {"Content-Type": "application/json"} - requests.get(url, headers=headers) - - -_container = None - - -def setup_container(): - global _container - - _container = DockerSetup() - _container.daemon = True - _container.start() - _container.container_is_ready.wait() - - if _container.error: - teardown_container() - else: - try: - ensure_running_endpoint(_container.container_data) - except Exception: - msg = 'Error during data generation script run.' 
- _container.error = msg - teardown_container() - - -def teardown_container(): - if _container: - _container.stop() - _container.remove() - _container.remove_image() - - -def setUpModule(): - setup_container() - - -def tearDownModule(): - teardown_container() - - -def setup_database(container_ip): - mysqldb = "mysql+pymysql://craton:craton@{}/craton".format(container_ip) - engine = create_engine(mysqldb) - meta = MetaData() - meta.reflect(engine) - - # NOTE(sulo, jimbaker): First clean the db up for tests, and do - # our own bootstrapping to isolate all test from any external - # scripts. - with contextlib.closing(engine.connect()) as conn: - transaction = conn.begin() - conn.execute("SET foreign_key_checks = 0") - for table in reversed(meta.sorted_tables): - conn.execute(table.delete()) - conn.execute("SET foreign_key_checks = 1") - transaction.commit() - - # NOTE(sulo, jimbaker): now bootstrap user and project; using the - # SA model allows us to respect the additional constraints in the - # model, vs having to duplicate logic if working against the - # database directly. - Session = sessionmaker(bind=engine) - session = Session() - project = models.Project( - name=FAKE_DATA_GEN_USERNAME, - id=FAKE_DATA_GEN_PROJECT_ID) - bootstrap_user = models.User( - project=project, - username=FAKE_DATA_GEN_BOOTSTRAP_USERNAME, - api_key=FAKE_DATA_GEN_BOOTSTRAP_TOKEN, - is_admin=True, - is_root=True) - demo_user = models.User( - project=project, - username=FAKE_DATA_GEN_USERNAME, - api_key=FAKE_DATA_GEN_TOKEN, - is_admin=True) - - session.add(project) - session.add(bootstrap_user) - session.add(demo_user) - - # NOTE(jimbaker) simple assumption: either this commit succeeds, - # or we need to fail fast - there's no recovery allowed in this - # testing setup. 
- session.commit() - - -class TestCase(testtools.TestCase): - - def setUp(self): - """Base setup provides container data back individual tests.""" - super(TestCase, self).setUp() - self.container_setup_error = _container.error - self.session = requests.Session() - - if not self.container_setup_error: - data = _container.container_data - self.service_ip = data['NetworkSettings']['IPAddress'] - self.url = 'http://{}:7780'.format(self.service_ip) - self.session.headers[HEADER_PROJECT] = FAKE_DATA_GEN_PROJECT_ID - self.session.headers[HEADER_USERNAME] = FAKE_DATA_GEN_USERNAME - self.session.headers[HEADER_TOKEN] = FAKE_DATA_GEN_TOKEN - - self.root_headers = copy.deepcopy(self.session.headers) - self.root_headers.update(get_root_headers()) - - setup_database(self.service_ip) - - def tearDown(self): - super(TestCase, self).tearDown() - - def assertSuccessOk(self, response): - self.assertEqual(requests.codes.OK, response.status_code) - - def assertSuccessCreated(self, response): - self.assertEqual(requests.codes.CREATED, response.status_code) - - def assertNoContent(self, response): - self.assertEqual(requests.codes.NO_CONTENT, response.status_code) - - def assertBadRequest(self, response): - self.assertEqual(requests.codes.BAD_REQUEST, response.status_code) - - def assertJSON(self, response): - if response.text: - try: - data = json.loads(response.text) - except json.JSONDecodeError: - self.fail("Response data is not JSON.") - else: - reference = "{formatted_data}\n".format( - formatted_data=json.dumps( - data, indent=2, sort_keys=True, separators=(',', ': ') - ) - ) - self.assertEqual( - reference, - response.text - ) - - def assertFailureFormat(self, response): - if response.status_code >= 400: - body = response.json() - self.assertEqual(2, len(body)) - self.assertEqual(response.status_code, body["status"]) - self.assertIn("message", body) - - def get(self, url, headers=None, **params): - resp = self.session.get( - url, verify=False, headers=headers, params=params, - ) 
- self.assertJSON(resp) - self.assertFailureFormat(resp) - return resp - - def post(self, url, headers=None, data=None): - resp = self.session.post( - url, verify=False, headers=headers, json=data, - ) - self.assertJSON(resp) - self.assertFailureFormat(resp) - return resp - - def put(self, url, headers=None, data=None): - resp = self.session.put( - url, verify=False, headers=headers, json=data, - ) - self.assertJSON(resp) - self.assertFailureFormat(resp) - return resp - - def delete(self, url, headers=None, body=None): - resp = self.session.delete( - url, verify=False, headers=headers, json=body, - ) - self.assertJSON(resp) - self.assertFailureFormat(resp) - return resp - - def create_project(self, name, variables=None): - url = self.url + '/v1/projects' - payload = {'name': name} - if variables: - payload['variables'] = variables - response = self.post(url, headers=self.root_headers, data=payload) - self.assertEqual(201, response.status_code) - self.assertIn('Location', response.headers) - project = response.json() - self.assertTrue(uuidutils.is_uuid_like(project['id'])) - self.assertEqual( - response.headers['Location'], - "{}/{}".format(url, project['id']) - ) - return project - - def create_cloud(self, name, variables=None): - url = self.url + '/v1/clouds' - - values = {'name': name} - if variables: - values['variables'] = variables - resp = self.post(url, data=values) - self.assertSuccessCreated(resp) - self.assertIn('Location', resp.headers) - json = resp.json() - self.assertEqual( - resp.headers['Location'], - "{}/{}".format(url, json['id']) - ) - return json - - def delete_clouds(self, clouds): - base_url = self.url + '/v1/clouds/{}' - for cloud in clouds: - url = base_url.format(cloud['id']) - resp = self.delete(url) - self.assertNoContent(resp) - - def create_region(self, name, cloud, variables=None): - url = self.url + '/v1/regions' - - values = {'name': name, 'cloud_id': cloud['id']} - if variables: - values['variables'] = variables - resp = 
self.post(url, data=values) - self.assertSuccessCreated(resp) - self.assertIn('Location', resp.headers) - json = resp.json() - self.assertEqual( - resp.headers['Location'], - "{}/{}".format(url, json['id']) - ) - return json - - def delete_regions(self, regions): - base_url = self.url + '/v1/regions/{}' - for region in regions: - url = base_url.format(region['id']) - resp = self.delete(url) - self.assertNoContent(resp) - - def create_cell(self, name, cloud, region, variables=None): - url = self.url + '/v1/cells' - payload = {'name': name, 'region_id': region['id'], - 'cloud_id': cloud['id']} - if variables: - payload['variables'] = variables - cell = self.post(url, data=payload) - self.assertEqual(201, cell.status_code) - self.assertIn('Location', cell.headers) - self.assertEqual( - cell.headers['Location'], - "{}/{}".format(url, cell.json()['id']) - ) - return cell.json() - - def create_network( - self, name, cloud, region, cidr, gateway, netmask, variables=None - ): - - url = self.url + '/v1/networks' - payload = { - 'name': name, - 'cidr': cidr, - 'gateway': gateway, - 'netmask': netmask, - 'region_id': region['id'], - 'cloud_id': cloud['id'], - } - if variables: - payload['variables'] = variables - - network = self.post(url, data=payload) - self.assertEqual(201, network.status_code) - self.assertIn('Location', network.headers) - self.assertEqual( - network.headers['Location'], - "{}/{}".format(url, network.json()['id']) - ) - return network.json() - - def create_host(self, name, cloud, region, hosttype, ip_address, - parent_id=None, **variables): - url = self.url + '/v1/hosts' - payload = { - 'name': name, - 'device_type': hosttype, - 'ip_address': ip_address, - 'region_id': region['id'], - 'cloud_id': cloud['id'] - } - if parent_id: - payload['parent_id'] = parent_id - if variables: - payload['variables'] = variables - - host = self.post(url, data=payload) - self.assertEqual(201, host.status_code) - self.assertIn('Location', host.headers) - self.assertEqual( - 
host.headers['Location'], - "{}/{}".format(url, host.json()['id']) - ) - return host.json() - - def create_network_device( - self, name, cloud, region, device_type, ip_address, parent_id=None, - **variables - ): - - url = self.url + '/v1/network-devices' - payload = { - 'name': name, - 'device_type': device_type, - 'ip_address': ip_address, - 'region_id': region['id'], - 'cloud_id': cloud['id'], - } - if parent_id: - payload['parent_id'] = parent_id - if variables: - payload['variables'] = variables - - network_device = self.post(url, data=payload) - self.assertEqual(201, network_device.status_code) - self.assertIn('Location', network_device.headers) - self.assertEqual( - network_device.headers['Location'], - "{}/{}".format(url, network_device.json()['id']) - ) - return network_device.json() - - -class DeviceTestBase(TestCase): - def setUp(self): - super(DeviceTestBase, self).setUp() - self.cloud = self.create_cloud() - self.region = self.create_region() - - def create_cloud(self, name='cloud-1'): - return super(DeviceTestBase, self).create_cloud(name=name) - - def create_region(self, name='region-1', cloud=None, variables=None): - return super(DeviceTestBase, self).create_region( - name=name, - cloud=cloud if cloud else self.cloud, - variables=variables, - ) - - def create_network_device(self, name, device_type, ip_address, region=None, - cloud=None, parent_id=None, **variables): - return super(DeviceTestBase, self).create_network_device( - name=name, - cloud=cloud if cloud else self.cloud, - region=region if region else self.region, - device_type=device_type, - ip_address=ip_address, - parent_id=parent_id, - **variables - ) - - def create_host(self, name, hosttype, ip_address, region=None, cloud=None, - parent_id=None, **variables): - return super(DeviceTestBase, self).create_host( - name=name, - cloud=cloud if cloud else self.cloud, - region=region if region else self.region, - hosttype=hosttype, - ip_address=ip_address, - parent_id=parent_id, - **variables - ) 
diff --git a/craton/tests/functional/test_cell_calls.py b/craton/tests/functional/test_cell_calls.py deleted file mode 100644 index 2f760b7..0000000 --- a/craton/tests/functional/test_cell_calls.py +++ /dev/null @@ -1,216 +0,0 @@ -from craton.tests.functional.test_variable_calls import \ - APIV1ResourceWithVariablesTestCase - - -class APIV1CellTest(APIV1ResourceWithVariablesTestCase): - - resource = 'cells' - - def setUp(self): - super(APIV1CellTest, self).setUp() - self.cloud = self.create_cloud() - self.region = self.create_region() - - def tearDown(self): - super(APIV1CellTest, self).tearDown() - - def create_cloud(self): - return super(APIV1CellTest, self).create_cloud(name='cloud-1') - - def create_region(self): - return super(APIV1CellTest, self).create_region( - name='region-1', - cloud=self.cloud, - variables={"region": "one"}, - ) - - def create_cell(self, name, variables=None): - return super(APIV1CellTest, self).create_cell( - name=name, - cloud=self.cloud, - region=self.region, - variables=variables - ) - - def test_cell_create_with_variables(self): - variables = {'a': 'b'} - cell = self.create_cell('cell-a', variables=variables) - self.assertEqual('cell-a', cell['name']) - self.assertEqual(variables, cell['variables']) - - def test_create_cell_supports_vars_ops(self): - cell = self.create_cell('new-cell', {'a': 'b'}) - self.assert_vars_get_expected(cell['id'], {'a': 'b'}) - self.assert_vars_can_be_set(cell['id']) - self.assert_vars_can_be_deleted(cell['id']) - - def test_cell_create_with_no_name_fails(self): - url = self.url + '/v1/cells' - payload = {'region_id': self.region['id']} - cell = self.post(url, data=payload) - self.assertEqual(400, cell.status_code) - - def test_cell_create_with_duplicate_name_fails(self): - self.create_cell('test-cell') - url = self.url + '/v1/cells' - payload = {'name': 'test-cell', 'region_id': self.region['id'], - "cloud_id": self.cloud['id']} - cell = self.post(url, data=payload) - self.assertEqual(409, 
cell.status_code) - - def test_cell_create_with_extra_id_property_fails(self): - url = self.url + '/v1/cells' - payload = {'region_id': self.region['id'], - 'cloud_id': self.cloud['id'], 'name': 'a', 'id': 3} - cell = self.post(url, data=payload) - self.assertEqual(400, cell.status_code) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed ('id' was unexpected)" - ) - self.assertEqual(cell.json()['message'], msg) - - def test_cell_create_with_extra_created_at_property_fails(self): - url = self.url + '/v1/cells' - payload = {'region_id': self.region['id'], - 'cloud_id': self.cloud['id'], 'name': 'a', - 'created_at': "some date"} - cell = self.post(url, data=payload) - self.assertEqual(400, cell.status_code) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed " - "('created_at' was unexpected)" - ) - self.assertEqual(cell.json()['message'], msg) - - def test_cell_create_with_extra_updated_at_property_fails(self): - url = self.url + '/v1/cells' - payload = {'region_id': self.region['id'], - 'cloud_id': self.cloud['id'], 'name': 'a', - 'updated_at': "some date"} - cell = self.post(url, data=payload) - self.assertEqual(400, cell.status_code) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed " - "('updated_at' was unexpected)" - ) - self.assertEqual(cell.json()['message'], msg) - - def test_cell_create_missing_all_properties_fails(self): - url = self.url + '/v1/cells' - cell = self.post(url, data={}) - self.assertEqual(400, cell.status_code) - msg = ( - "The request included the following errors:\n" - "- 'cloud_id' is a required property\n" - "- 'name' is a required property\n" - "- 'region_id' is a required property" - ) - self.assertEqual(cell.json()['message'], msg) - - def test_cells_get_all_with_details(self): - self.create_cell('cell1', variables={'a': 'b'}) - self.create_cell('cell2', variables={'c': 'd'}) - url = 
self.url + '/v1/cells?details=all' - resp = self.get(url) - cells = resp.json()['cells'] - self.assertEqual(2, len(cells)) - for cell in cells: - self.assertTrue('variables' in cell) - - for cell in cells: - if cell['name'] == 'cell1': - expected = {'a': 'b', "region": "one"} - self.assertEqual(expected, cell['variables']) - if cell['name'] == 'cell2': - expected = {'c': 'd', "region": "one"} - self.assertEqual(expected, cell['variables']) - - def test_cells_get_all_for_region(self): - # Create a cell first - self.create_cell('cell-1') - url = self.url + '/v1/cells?region_id={}'.format(self.region['id']) - resp = self.get(url) - cells = resp.json()['cells'] - self.assertEqual(1, len(cells)) - self.assertEqual(['cell-1'], [i['name'] for i in cells]) - - def test_cells_get_all_for_cloud(self): - # Create a cell first - for i in range(2): - self.create_cell('cell-{}'.format(str(i))) - url = self.url + '/v1/cells?cloud_id={}'.format(self.cloud['id']) - resp = self.get(url) - cells = resp.json()['cells'] - self.assertEqual(2, len(cells)) - self.assertEqual(['cell-0', 'cell-1'], [i['name'] for i in cells]) - - def test_cell_get_all_with_name_filter(self): - self.create_cell('cell1') - self.create_cell('cell2') - url = self.url + '/v1/cells?name=cell2' - resp = self.get(url) - cells = resp.json()['cells'] - self.assertEqual(1, len(cells)) - self.assertEqual({'cell2'}, {cell['name'] for cell in cells}) - - def test_get_cell_details(self): - cellvars = {"who": "that"} - cell = self.create_cell('cell1', variables=cellvars) - url = self.url + '/v1/cells/{}'.format(cell['id']) - resp = self.get(url) - cell_with_detail = resp.json() - self.assertEqual('cell1', cell_with_detail['name']) - - def test_get_cell_resolved_vars(self): - cellvars = {"who": "that"} - cell = self.create_cell('cell1', variables=cellvars) - url = self.url + '/v1/cells/{}'.format(cell['id']) - resp = self.get(url) - cell_with_detail = resp.json() - self.assertEqual('cell1', cell_with_detail['name']) - 
self.assertEqual({"who": "that", "region": "one"}, - cell_with_detail['variables']) - - def test_get_cell_unresolved_vars(self): - cellvars = {"who": "that"} - cell = self.create_cell('cell1', variables=cellvars) - cell_id = cell['id'] - url = self.url + '/v1/cells/{}?resolved-values=false'.format(cell_id) - resp = self.get(url) - cell_with_detail = resp.json() - self.assertEqual('cell1', cell_with_detail['name']) - self.assertEqual({"who": "that"}, cell_with_detail['variables']) - - def test_cell_update(self): - cell = self.create_cell('cell-1') - url = self.url + '/v1/cells/{}'.format(cell['id']) - data = {'note': 'Updated cell note.'} - resp = self.put(url, data=data) - self.assertEqual(200, resp.status_code) - cell = resp.json() - self.assertEqual(data['note'], cell['note']) - - def test_cell_delete(self): - cell1 = self.create_cell('cell-1') - self.create_cell('cell-2') - url = self.url + '/v1/cells' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - cells = resp.json()['cells'] - self.assertEqual(2, len(cells)) - self.assertEqual({'cell-1', 'cell-2'}, - {cell['name'] for cell in cells}) - - delurl = self.url + '/v1/cells/{}'.format(cell1['id']) - resp = self.delete(delurl) - self.assertEqual(204, resp.status_code) - - resp = self.get(url) - self.assertEqual(200, resp.status_code) - cells = resp.json()['cells'] - self.assertEqual(1, len(cells)) - self.assertEqual({'cell-2'}, - {cell['name'] for cell in cells}) diff --git a/craton/tests/functional/test_cloud_calls.py b/craton/tests/functional/test_cloud_calls.py deleted file mode 100644 index 7626b5e..0000000 --- a/craton/tests/functional/test_cloud_calls.py +++ /dev/null @@ -1,204 +0,0 @@ -import urllib.parse - -from craton.tests.functional import TestCase - - -class APIV1CloudTest(TestCase): - """Test cases for /cloud calls. 
- """ - - def test_create_cloud_full_data(self): - # Test with full set of allowed parameters - values = {"name": "cloud-new", - "note": "This is cloud-new.", - "variables": {"a": "b"}} - url = self.url + '/v1/clouds' - resp = self.post(url, data=values) - self.assertEqual(201, resp.status_code) - self.assertIn('Location', resp.headers) - self.assertEqual( - resp.headers['Location'], - "{}/{}".format(url, resp.json()['id']) - ) - self.assertEqual(values['name'], resp.json()['name']) - - def test_create_cloud_without_variables(self): - values = {"name": "cloud-two", - "note": "This is cloud-two"} - url = self.url + '/v1/clouds' - resp = self.post(url, data=values) - self.assertEqual(201, resp.status_code) - self.assertIn('Location', resp.headers) - self.assertEqual( - resp.headers['Location'], - "{}/{}".format(url, resp.json()['id']) - ) - self.assertEqual("cloud-two", resp.json()['name']) - - def test_create_cloud_with_no_name_fails(self): - values = {"note": "This is cloud one."} - url = self.url + '/v1/clouds' - resp = self.post(url, data=values) - self.assertEqual(resp.status_code, 400) - err_msg = ( - "The request included the following errors:\n" - "- 'name' is a required property" - ) - self.assertEqual(resp.json()['message'], err_msg) - - def test_create_cloud_with_duplicate_name_fails(self): - self.create_cloud("ORD135") - - values = {"name": "ORD135"} - url = self.url + '/v1/clouds' - resp = self.post(url, data=values) - self.assertEqual(409, resp.status_code) - - def test_create_region_with_extra_id_property_fails(self): - values = {"name": "test", "id": 101} - url = self.url + '/v1/clouds' - resp = self.post(url, data=values) - self.assertEqual(resp.status_code, 400) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed ('id' was unexpected)" - ) - self.assertEqual(resp.json()['message'], msg) - - def test_create_region_with_extra_created_at_property_fails(self): - values = {"name": "test", "created_at": 
"some date"} - url = self.url + '/v1/clouds' - resp = self.post(url, data=values) - self.assertEqual(resp.status_code, 400) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed " - "('created_at' was unexpected)" - ) - self.assertEqual(resp.json()['message'], msg) - - def test_create_region_with_extra_updated_at_property_fails(self): - values = {"name": "test", "updated_at": "some date"} - url = self.url + '/v1/clouds' - resp = self.post(url, data=values) - self.assertEqual(resp.status_code, 400) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed " - "('updated_at' was unexpected)" - ) - self.assertEqual(resp.json()['message'], msg) - - def test_cloud_create_missing_all_properties_fails(self): - url = self.url + '/v1/clouds' - cloud = self.post(url, data={}) - self.assertEqual(400, cloud.status_code) - msg = ( - "The request included the following errors:\n" - "- 'name' is a required property" - ) - self.assertEqual(cloud.json()['message'], msg) - - def test_clouds_get_all(self): - self.create_cloud("ORD1") - self.create_cloud("ORD2") - url = self.url + '/v1/clouds' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - self.assertEqual(2, len(resp.json())) - - def test_clouds_get_all_with_details_filter(self): - c1 = self.create_cloud("ORD1", variables={'a': 'b'}) - c2 = self.create_cloud("ORD2", variables={'c': 'd'}) - url = self.url + '/v1/clouds?details=all' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - clouds = resp.json()['clouds'] - self.assertEqual(2, len(clouds)) - for cloud in clouds: - self.assertTrue('variables' in cloud) - - for cloud in clouds: - if cloud['name'] == 'ORD1': - self.assertEqual(c1['variables'], {'a': 'b'}) - if cloud['name'] == 'ORD2': - self.assertEqual(c2['variables'], {'c': 'd'}) - - def test_clouds_get_all_with_name_filter(self): - self.create_cloud("ORD1") - self.create_cloud("ORD2") - url = self.url 
+ '/v1/clouds?name=ORD1' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - clouds = resp.json()['clouds'] - self.assertEqual(1, len(clouds)) - self.assertEqual('ORD1', clouds[0]['name']) - - def test_cloud_with_non_existing_filters(self): - self.create_cloud("ORD1") - url = self.url + '/v1/clouds?name=idontexist' - resp = self.get(url) - self.assertEqual(404, resp.status_code) - - def test_cloud_get_details_for_cloud(self): - regvars = {"a": "b", "one": "two"} - cloud = self.create_cloud("ORD1", variables=regvars) - url = self.url + '/v1/clouds/{}'.format(cloud['id']) - resp = self.get(url) - cloud = resp.json() - self.assertEqual(cloud['name'], 'ORD1') - self.assertEqual(regvars, cloud['variables']) - - -class TestPagination(TestCase): - - def setUp(self): - super(TestPagination, self).setUp() - self.clouds = [self.create_cloud('cloud-{}'.format(i)) - for i in range(0, 61)] - self.addCleanup(self.delete_clouds, self.clouds) - - def test_list_first_thirty_clouds(self): - url = self.url + '/v1/clouds' - response = self.get(url) - self.assertSuccessOk(response) - json = response.json() - self.assertIn('clouds', json) - self.assertEqual(30, len(json['clouds'])) - self.assertListEqual([r['id'] for r in self.clouds[:30]], - [r['id'] for r in json['clouds']]) - - def test_get_returns_correct_next_link(self): - url = self.url + '/v1/clouds' - thirtieth_cloud = self.clouds[29] - response = self.get(url) - self.assertSuccessOk(response) - json = response.json() - self.assertIn('links', json) - for link_rel in json['links']: - if link_rel['rel'] == 'next': - break - else: - self.fail("No 'next' link was returned in response") - - parsed_next = urllib.parse.urlparse(link_rel['href']) - self.assertIn('marker={}'.format(thirtieth_cloud['id']), - parsed_next.query) - - def test_get_returns_correct_prev_link(self): - first_cloud = self.clouds[0] - thirtieth_cloud = self.clouds[29] - url = self.url + '/v1/clouds?marker={}'.format(thirtieth_cloud['id']) - response 
= self.get(url) - self.assertSuccessOk(response) - json = response.json() - self.assertIn('links', json) - for link_rel in json['links']: - if link_rel['rel'] == 'prev': - break - else: - self.fail("No 'prev' link was returned in response") - - parsed_prev = urllib.parse.urlparse(link_rel['href']) - self.assertIn('marker={}'.format(first_cloud['id']), - parsed_prev.query) diff --git a/craton/tests/functional/test_device_calls.py b/craton/tests/functional/test_device_calls.py deleted file mode 100644 index ba28a65..0000000 --- a/craton/tests/functional/test_device_calls.py +++ /dev/null @@ -1,190 +0,0 @@ -from itertools import count, cycle -import urllib.parse - -from craton.tests.functional import DeviceTestBase - - -class DeviceTests(DeviceTestBase): - - def count_devices(self, devices): - num_devices = ( - len(devices['hosts']) + - len(devices['network-devices']) - ) - return num_devices - - -class APIV1DeviceTest(DeviceTests): - - def setUp(self): - super().setUp() - self.net_device1 = self.create_network_device( - 'network_device1', 'switch', '192.168.1.1' - ) - self.net_device2 = self.create_network_device( - 'network_device2', 'switch', '192.168.1.2', - parent_id=self.net_device1['id'], - ) - self.host1 = self.create_host( - 'host1', 'server', '192.168.1.3', parent_id=self.net_device2['id'] - ) - self.container1 = self.create_host( - 'host1container1', 'container', '192.168.1.4', - parent_id=self.host1['id'], - ) - url = self.url + '/v1/devices' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - devices = resp.json()['devices'] - self.assertEqual(4, self.count_devices(devices)) - - def test_device_get_by_parent_id_no_descendants(self): - url = '{}/v1/devices?parent_id={}'.format( - self.url, self.net_device1['id'] - ) - resp = self.get(url) - self.assertEqual(200, resp.status_code) - devices = resp.json()['devices'] - self.assertEqual(1, self.count_devices(devices)) - self.assertEqual( - self.net_device1['id'], 
devices['network-devices'][0]['parent_id'] - ) - - def test_device_get_by_parent_id_with_descendants(self): - url = '{}/v1/devices?parent_id={}&descendants=true'.format( - self.url, self.net_device1['id'] - ) - resp = self.get(url) - self.assertEqual(200, resp.status_code) - devices = resp.json()['devices'] - self.assertEqual(3, self.count_devices(devices)) - self.assertEqual( - self.net_device1['id'], devices['network-devices'][0]['parent_id'] - ) - self.assertEqual( - self.net_device2['id'], devices['hosts'][0]['parent_id'] - ) - self.assertEqual(self.host1['id'], devices['hosts'][1]['parent_id']) - - def test_device_by_missing_filter(self): - url = self.url + '/v1/devices?active=false' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - devices = resp.json()['devices'] - self.assertEqual(0, self.count_devices(devices)) - - -class TestPagination(DeviceTests): - - def setUp(self): - super().setUp() - self.devices = [] - last_octet = count(1) - - first_network_device = self.create_network_device( - 'network-device0', - 'switch', - '192.168.1.{}'.format(next(last_octet)), - ) - self.devices.append(first_network_device) - - for i in range(1, 3): - network_device = self.create_network_device( - 'network-device{}'.format(i), - 'switch', - '192.168.1.{}'.format(next(last_octet)), - ) - self.devices.append(network_device) - host_parents = ( - self.devices[1], - self.devices[2], - ) - for i, host_parent in zip(range(12), cycle(host_parents)): - host = self.create_host( - 'host{}'.format(i), - 'server', - '192.168.1.{}'.format(next(last_octet)), - parent_id=host_parent['id'], - ) - self.devices.append(host) - - for j in range(4): - container = self.create_host( - 'host{}container{}'.format(i, j), - 'container', - '192.168.1.{}'.format(next(last_octet)), - parent_id=host['id'], - ) - self.devices.append(container) - - def test_get_returns_a_default_list_of_thirty_devices(self): - response = self.get(self.url + '/v1/devices') - self.assertSuccessOk(response) - 
devices = response.json() - self.assertIn('devices', devices) - self.assertEqual(30, self.count_devices(devices['devices'])) - returned_device_ids = sorted( - device['id'] - for dt in devices['devices'].values() - for device in dt - ) - self.assertListEqual( - [d['id'] for d in self.devices[:30]], - returned_device_ids - ) - - def test_get_returns_correct_next_link(self): - thirtieth_device = self.devices[29] - response = self.get(self.url + '/v1/devices') - self.assertSuccessOk(response) - devices = response.json() - self.assertIn('links', devices) - for link_rel in devices['links']: - if link_rel['rel'] == 'next': - break - else: - self.fail("No 'next' link was returned in response") - - parsed_next = urllib.parse.urlparse(link_rel['href']) - self.assertIn('marker={}'.format(thirtieth_device['id']), - parsed_next.query) - - def test_get_returns_correct_prev_link(self): - first_device = self.devices[0] - thirtieth_device = self.devices[29] - url = self.url + '/v1/devices?marker={}'.format(thirtieth_device['id']) - response = self.get(url) - self.assertSuccessOk(response) - devices = response.json() - self.assertIn('links', devices) - for link_rel in devices['links']: - if link_rel['rel'] == 'prev': - break - else: - self.fail("No 'prev' link was returned in response") - - parsed_prev = urllib.parse.urlparse(link_rel['href']) - self.assertIn( - 'marker={}'.format(first_device['id']), parsed_prev.query - ) - - def test_ascending_sort_by_name(self): - response = self.get(self.url + '/v1/devices', - sort_keys='name', sort_dir='asc') - self.assertSuccessOk(response) - devices = response.json()['devices'] - self.assertEqual(30, self.count_devices(devices)) - - def test_ascending_sort_by_name_and_id(self): - response = self.get(self.url + '/v1/devices', - sort_keys='name,id', sort_dir='asc') - self.assertSuccessOk(response) - devices = response.json()['devices'] - self.assertEqual(30, self.count_devices(devices)) - - def 
test_ascending_sort_by_name_and_id_space_separated(self): - response = self.get(self.url + '/v1/devices', - sort_keys='name id', sort_dir='asc') - self.assertSuccessOk(response) - devices = response.json()['devices'] - self.assertEqual(30, self.count_devices(devices)) diff --git a/craton/tests/functional/test_host_calls.py b/craton/tests/functional/test_host_calls.py deleted file mode 100644 index 382168b..0000000 --- a/craton/tests/functional/test_host_calls.py +++ /dev/null @@ -1,552 +0,0 @@ -import urllib.parse - -from craton.tests.functional import DeviceTestBase -from craton.tests.functional.test_variable_calls import \ - APIV1ResourceWithVariablesTestCase - - -class APIV1HostTest(DeviceTestBase, APIV1ResourceWithVariablesTestCase): - - resource = 'hosts' - - def test_create_host_supports_vars_ops(self): - host = self.create_host('host1', 'server', '192.168.1.1') - self.assert_vars_get_expected(host['id'], {}) - self.assert_vars_can_be_set(host['id']) - self.assert_vars_can_be_deleted(host['id']) - - def test_host_get_by_vars_filter(self): - vars1 = {"a": "b", "host": "one"} - self.create_host('host1', 'server', '192.168.1.1', **vars1) - vars2 = {"a": "b"} - self.create_host('host2', 'server', '192.168.1.2', **vars2) - - url = self.url + '/v1/hosts' - resp = self.get(url, vars='a:"b"') - self.assertEqual(200, resp.status_code) - hosts = resp.json()['hosts'] - self.assertEqual(2, len(hosts)) - self.assertEqual({'192.168.1.1', '192.168.1.2'}, - {host['ip_address'] for host in hosts}) - - url = self.url + '/v1/hosts' - resp = self.get(url, vars='host:"one"') - self.assertEqual(200, resp.status_code) - hosts = resp.json()['hosts'] - self.assertEqual(1, len(hosts)) - self.assertEqual('192.168.1.1', hosts[0]['ip_address']) - self.assert_vars_get_expected(hosts[0]['id'], vars1) - - def test_create_host(self): - host = self.create_host('host1', 'server', '192.168.1.1') - self.assertEqual('host1', host['name']) - - def test_create_with_missing_name_fails(self): - url = 
self.url + '/v1/hosts' - payload = {'device_type': 'server', 'ip_address': '192.168.1.1', - 'region_id': self.region['id']} - host = self.post(url, data=payload) - self.assertEqual(400, host.status_code) - - def test_create_with_missing_ip_fails(self): - url = self.url + '/v1/hosts' - payload = {'name': 'test', 'device_type': 'server', - 'region_id': self.region['id']} - host = self.post(url, data=payload) - self.assertEqual(400, host.status_code) - - def test_create_with_missing_type_fails(self): - url = self.url + '/v1/hosts' - payload = {'name': 'who', 'ip_address': '192.168.1.1', - 'region_id': self.region['id']} - host = self.post(url, data=payload) - self.assertEqual(400, host.status_code) - - def test_create_with_extra_id_property_fails(self): - url = self.url + '/v1/hosts' - payload = {'device_type': 'server', 'ip_address': '192.168.1.1', - 'region_id': self.region['id'], - 'cloud_id': self.cloud['id'], 'name': 'a', 'id': 1} - host = self.post(url, data=payload) - self.assertEqual(400, host.status_code) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed ('id' was unexpected)" - ) - self.assertEqual(host.json()['message'], msg) - - def test_create_with_extra_created_at_property_fails(self): - url = self.url + '/v1/hosts' - payload = {'device_type': 'server', 'ip_address': '192.168.1.1', - 'region_id': self.region['id'], - 'cloud_id': self.cloud['id'], 'name': 'a', - 'created_at': 'some date'} - host = self.post(url, data=payload) - self.assertEqual(400, host.status_code) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed " - "('created_at' was unexpected)" - ) - self.assertEqual(host.json()['message'], msg) - - def test_create_with_extra_updated_at_property_fails(self): - url = self.url + '/v1/hosts' - payload = {'device_type': 'server', 'ip_address': '192.168.1.1', - 'region_id': self.region['id'], - 'cloud_id': self.cloud['id'], 'name': 'a', - 
'updated_at': 'some date'} - host = self.post(url, data=payload) - self.assertEqual(400, host.status_code) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed " - "('updated_at' was unexpected)" - ) - self.assertEqual(host.json()['message'], msg) - - def test_create_missing_all_properties_fails(self): - url = self.url + '/v1/hosts' - host = self.post(url, data={}) - self.assertEqual(400, host.status_code) - msg = ( - "The request included the following errors:\n" - "- 'cloud_id' is a required property\n" - "- 'device_type' is a required property\n" - "- 'ip_address' is a required property\n" - "- 'name' is a required property\n" - "- 'region_id' is a required property" - ) - self.assertEqual(host.json()['message'], msg) - - def test_create_with_parent_id(self): - parent = self.create_host( - name='test1', - cloud=self.cloud, - region=self.region, - hosttype='server', - ip_address='192.168.1.1', - ) - child = self.create_host( - name='test2', - cloud=self.cloud, - region=self.region, - hosttype='server', - ip_address='192.168.1.2', - parent_id=parent['id'], - ) - self.assertEqual(parent['id'], child['parent_id']) - - def test_update_with_parent_id(self): - parent = self.create_host( - name='test1', - cloud=self.cloud, - region=self.region, - hosttype='server', - ip_address='192.168.1.1', - ) - - child = self.create_host( - name='test2', - cloud=self.cloud, - region=self.region, - hosttype='server', - ip_address='192.168.1.2', - ) - self.assertIsNone(child['parent_id']) - - url = '{}/v1/hosts/{}'.format(self.url, child['id']) - child_update_resp = self.put( - url, data={'parent_id': parent['id']} - ) - self.assertEqual(200, child_update_resp.status_code) - child_update = child_update_resp.json() - self.assertEqual(parent['id'], child_update['parent_id']) - - def test_update_with_parent_id_equal_id_fails(self): - host = self.create_host( - name='test1', - cloud=self.cloud, - region=self.region, - hosttype='server', - 
ip_address='192.168.1.1', - ) - - url = '{}/v1/hosts/{}'.format(self.url, host['id']) - host_update_resp = self.put( - url, data={'parent_id': host['id']} - ) - self.assertEqual(400, host_update_resp.status_code) - - def test_update_with_parent_id_equal_descendant_id_fails(self): - parent = self.create_host( - name='test1', - cloud=self.cloud, - region=self.region, - hosttype='server', - ip_address='192.168.1.1', - ) - self.assertIsNone(parent['parent_id']) - - child = self.create_host( - name='test2', - cloud=self.cloud, - region=self.region, - hosttype='server', - ip_address='192.168.1.2', - parent_id=parent['id'], - ) - self.assertEqual(parent['id'], child['parent_id']) - - grandchild = self.create_host( - name='test3', - cloud=self.cloud, - region=self.region, - hosttype='server', - ip_address='192.168.1.3', - parent_id=child['id'], - ) - self.assertEqual(child['id'], grandchild['parent_id']) - - url = '{}/v1/hosts/{}'.format(self.url, parent['id']) - parent_update_resp = self.put( - url, data={'parent_id': grandchild['id']} - ) - self.assertEqual(400, parent_update_resp.status_code) - - def test_get_all_hosts_with_details(self): - region_vars = {'x': 'y'} - region = self.create_region(name='region1', variables=region_vars) - variables = {"a": "b"} - self.create_host('host1', 'server', '192.168.1.1', region=region, - **variables) - self.create_host('host2', 'server', '192.168.1.2', region=region, - **variables) - url = self.url + '/v1/hosts?details=all' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - hosts = resp.json()['hosts'] - self.assertEqual(2, len(hosts)) - for host in hosts: - self.assertTrue('variables' in host) - self.assertEqual({'a': 'b', 'x': 'y'}, host['variables']) - - def test_host_get_by_ip_filter(self): - self.create_host('host1', 'server', '192.168.1.1') - self.create_host('host2', 'server', '192.168.1.2') - url = self.url + '/v1/hosts?ip_address=192.168.1.1' - resp = self.get(url) - self.assertEqual(200, resp.status_code) 
- hosts = resp.json()['hosts'] - self.assertEqual(1, len(hosts)) - self.assertEqual('192.168.1.1', hosts[0]['ip_address']) - - def test_host_by_missing_filter(self): - self.create_host('host1', 'server', '192.168.1.1') - url = self.url + '/v1/hosts?ip_address=192.168.1.2' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - self.assertEqual(0, len(resp.json()['hosts'])) - - def test_host_create_labels(self): - res = self.create_host('host1', 'server', '192.168.1.1') - url = self.url + '/v1/hosts/{}/labels'.format(res['id']) - - data = {"labels": ["compute"]} - resp = self.put(url, data=data) - self.assertEqual(200, resp.status_code) - - resp = self.get(url) - self.assertEqual(data, resp.json()) - - def test_host_by_label_filter_match_one(self): - labels_route_mask = '/v1/hosts/{}/labels' - host1 = self.create_host('host1', 'server', '192.168.1.1') - host2 = self.create_host('host2', 'server', '192.168.1.2') - host3 = self.create_host('host3', 'server', '192.168.1.3') - - # set labels on hosts - data = {"labels": ["compute"]} - for host in (host1, host2, host3): - url = self.url + labels_route_mask.format(host['id']) - resp = self.put(url, data=data) - self.assertEqual(200, resp.status_code) - - # set one of them with extra labels - data = {"labels": ["compute", "scheduler"]} - url = self.url + labels_route_mask.format(host3['id']) - resp = self.put(url, data=data) - self.assertEqual(200, resp.status_code) - - # get hosts by its label - url = self.url + '/v1/hosts?label=scheduler' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - hosts = resp.json()['hosts'] - self.assertEqual(1, len(hosts)) - self.assertEqual(host3['id'], hosts[0]['id']) - - def test_host_by_label_filters_match_all(self): - labels_route_mask = '/v1/hosts/{}/labels' - host1 = self.create_host('host1', 'server', '192.168.1.1') - host2 = self.create_host('host2', 'server', '192.168.1.2') - host3 = self.create_host('host3', 'server', '192.168.1.3') - - # set labels on 
hosts - data = {"labels": ["compute"]} - for host in (host1, host2, host3): - url = self.url + labels_route_mask.format(host['id']) - resp = self.put(url, data=data) - self.assertEqual(200, resp.status_code) - - # set one of them with extra labels - data = {"labels": ["compute", "scheduler"]} - url = self.url + labels_route_mask.format(host2['id']) - resp = self.put(url, data=data) - self.assertEqual(200, resp.status_code) - - # get hosts by its label - url = self.url + '/v1/hosts?label=scheduler&label=compute' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - hosts = resp.json()['hosts'] - self.assertEqual(1, len(hosts)) - self.assertEqual(host2['id'], hosts[0]['id']) - - def test_host_by_label_filters_match_one_common(self): - labels_route_mask = '/v1/hosts/{}/labels' - test_hosts = [ - self.create_host('host1', 'server', '192.168.1.1'), - self.create_host('host2', 'server', '192.168.1.2'), - self.create_host('host3', 'server', '192.168.1.3'), - ] - - # set labels on hosts - data = {"labels": ["compute"]} - for host in test_hosts: - url = self.url + labels_route_mask.format(host['id']) - resp = self.put(url, data=data) - self.assertEqual(200, resp.status_code) - - # set one of them with extra labels - data = {"labels": ["compute", "scheduler"]} - url = self.url + labels_route_mask.format(test_hosts[1]['id']) - resp = self.put(url, data=data) - self.assertEqual(200, resp.status_code) - - # get hosts by its label - url = self.url + '/v1/hosts?label=compute' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - hosts = resp.json()['hosts'] - self.assertEqual(3, len(hosts)) - self.assertEqual(sorted([host['id'] for host in test_hosts]), - sorted([host['id'] for host in hosts])) - - def test_host_get_all_vars_filter_resolved_region(self): - region_vars = {'foo': 'bar'} - region = self.create_region(name='region-2', variables=region_vars) - host_vars = {'baz': 'zoo'} - self.create_host('host1', 'server', '192.168.1.1', **host_vars) - host2 
= self.create_host('host2', 'server', '192.168.1.2', - region=region, **host_vars) - url = self.url + '/v1/hosts' - - resp = self.get(url, vars='foo:"bar",baz:"zoo"') - hosts = resp.json()['hosts'] - self.assertEqual(1, len(hosts)) - self.assertEqual(host2['id'], hosts[0]['id']) - - def test_host_get_all_vars_filter_resolved_region_and_host(self): - region_vars = {'foo': 'bar'} - region = self.create_region(name='region-2', variables=region_vars) - host_vars = {'baz': 'zoo'} - host1 = self.create_host('host1', 'server', '192.168.1.1', - **region_vars) - host2 = self.create_host('host2', 'server', '192.168.1.2', - region=region, **host_vars) - url = self.url + '/v1/hosts' - - resp = self.get(url, vars='foo:"bar"') - hosts = resp.json()['hosts'] - self.assertEqual(2, len(hosts)) - self.assertListEqual(sorted([host1['id'], host2['id']]), - sorted([host['id'] for host in hosts])) - - def test_host_get_all_vars_filter_resolved_region_child_override(self): - region_vars = {'foo': 'bar'} - region = self.create_region(name='region-2', variables=region_vars) - host1 = self.create_host('host1', 'server', '192.168.1.1', - region=region, foo='baz') - host2 = self.create_host('host2', 'server', '192.168.1.2', - region=region) - url = self.url + '/v1/hosts' - - resp = self.get(url, vars='foo:"baz"') - hosts = resp.json()['hosts'] - self.assertEqual(1, len(hosts)) - self.assertEqual(host1['id'], hosts[0]['id']) - - resp = self.get(url, vars='foo:"bar"') - hosts = resp.json()['hosts'] - self.assertEqual(1, len(hosts)) - self.assertEqual(host2['id'], hosts[0]['id']) - - def test_host_get_all_vars_filter_resolved_host_child_override(self): - host1 = self.create_host('host1', 'server', '192.168.1.1', - baz='zoo') - host2 = self.create_host('host2', 'server', '192.168.1.2', - parent_id=host1['id'], baz='boo') - url = self.url + '/v1/hosts' - - resp = self.get(url, vars='baz:"zoo"') - hosts = resp.json()['hosts'] - self.assertEqual(1, len(hosts)) - self.assertEqual(host1['id'], 
hosts[0]['id']) - - resp = self.get(url, vars='baz:"boo"') - hosts = resp.json()['hosts'] - self.assertEqual(1, len(hosts)) - self.assertEqual(host2['id'], hosts[0]['id']) - - def test_host_get_all_vars_filter_unresolved(self): - host1 = self.create_host('host1', 'server', '192.168.1.1', - foo='bar', baz='zoo') - self.create_host('host2', 'server', '192.168.1.2', foo='bar') - - # NOTE(thomasem): Unfortunately, we use resolved-values instead of - # resolved_values, so we can't pass this in as kwargs to self.get(...), - # see https://bugs.launchpad.net/craton/+bug/1672880. - url = self.url + \ - '/v1/hosts?resolved-values=false&vars=foo:"bar",baz:"zoo"' - - resp = self.get(url) - hosts = resp.json()['hosts'] - self.assertEqual(1, len(hosts)) - self.assertEqual(host1['id'], hosts[0]['id']) - - def test_host_delete(self): - host = self.create_host('host1', 'server', '192.168.1.1') - url = self.url + '/v1/hosts/{}'.format(host['id']) - resp = self.delete(url) - self.assertEqual(204, resp.status_code) - - resp = self.get(url) - self.assertEqual(404, resp.status_code) - self.assertEqual({'status': 404, 'message': 'Not Found'}, - resp.json()) - - -class TestPagination(DeviceTestBase): - - def setUp(self): - super(TestPagination, self).setUp() - self.hosts = [ - self.create_host('host{}'.format(i), 'server', - '192.168.1.{}'.format(i + 1)) - for i in range(0, 61) - ] - - def test_get_returns_a_default_list_of_thirty_hosts(self): - response = self.get(self.url + '/v1/hosts') - self.assertSuccessOk(response) - hosts = response.json() - self.assertIn('hosts', hosts) - self.assertEqual(30, len(hosts['hosts'])) - self.assertListEqual([h['id'] for h in self.hosts[:30]], - [h['id'] for h in hosts['hosts']]) - - def test_get_returns_correct_next_link(self): - thirtieth_host = self.hosts[29] - response = self.get(self.url + '/v1/hosts') - self.assertSuccessOk(response) - hosts = response.json() - self.assertIn('links', hosts) - for link_rel in hosts['links']: - if link_rel['rel'] == 
'next': - break - else: - self.fail("No 'next' link was returned in response") - - parsed_next = urllib.parse.urlparse(link_rel['href']) - self.assertIn('marker={}'.format(thirtieth_host['id']), - parsed_next.query) - - def test_get_returns_correct_prev_link(self): - first_host = self.hosts[0] - thirtieth_host = self.hosts[29] - url = self.url + '/v1/hosts?marker={}'.format(thirtieth_host['id']) - response = self.get(url) - self.assertSuccessOk(response) - hosts = response.json() - self.assertIn('links', hosts) - for link_rel in hosts['links']: - if link_rel['rel'] == 'prev': - break - else: - self.fail("No 'prev' link was returned in response") - - parsed_prev = urllib.parse.urlparse(link_rel['href']) - self.assertIn('marker={}'.format(first_host['id']), parsed_prev.query) - - def test_get_all_for_region(self): - region = self.create_region('region-2') - self.create_host('host1', 'server', '192.168.1.1', region=region) - self.create_host('host2', 'server', '192.168.1.2', region=region) - url = self.url + '/v1/hosts?region_id={}'.format(region['id']) - resp = self.get(url) - self.assertSuccessOk(resp) - hosts = resp.json() - self.assertEqual(2, len(hosts['hosts'])) - - def test_get_all_for_cloud(self): - cloud = self.create_cloud('cloud-2') - region = self.create_region(cloud=cloud) - self.create_host('host1', 'server', '192.168.1.1', cloud=cloud, - region=region) - self.create_host('host2', 'server', '192.168.1.2', cloud=cloud, - region=region) - url = self.url + '/v1/hosts?cloud_id={}'.format(cloud['id']) - resp = self.get(url) - self.assertSuccessOk(resp) - hosts = resp.json()['hosts'] - self.assertEqual(2, len(hosts)) - self.assertEqual(['host1', 'host2'], [h['name'] for h in hosts]) - - def test_ascending_sort_by_name(self): - response = self.get(self.url + '/v1/hosts', - sort_keys='name', sort_dir='asc') - self.assertSuccessOk(response) - hosts = response.json()['hosts'] - self.assertEqual(30, len(hosts)) - - def test_ascending_sort_by_name_and_id(self): - 
response = self.get(self.url + '/v1/hosts', - sort_keys='name,id', sort_dir='asc') - self.assertSuccessOk(response) - hosts = response.json()['hosts'] - self.assertEqual(30, len(hosts)) - - def test_ascending_sort_by_name_and_id_space_separated(self): - response = self.get(self.url + '/v1/hosts', - sort_keys='name id', sort_dir='asc') - self.assertSuccessOk(response) - hosts = response.json()['hosts'] - self.assertEqual(30, len(hosts)) - - def test_follows_next_link(self): - url = self.url + '/v1/hosts' - response = self.get(url) - self.assertSuccessOk(response) - json = response.json() - hosts = json['hosts'] - while hosts: - for link in json['links']: - if link['rel'] == 'next': - break - else: - break - response = self.get(link['href']) - self.assertSuccessOk(response) - json = response.json() - hosts = json['hosts'] diff --git a/craton/tests/functional/test_jsonpath_search.py b/craton/tests/functional/test_jsonpath_search.py deleted file mode 100644 index affb05a..0000000 --- a/craton/tests/functional/test_jsonpath_search.py +++ /dev/null @@ -1,550 +0,0 @@ -from craton import exceptions -from craton.tests import functional - -TEST_STRING = "I'm just a string" - -TEST_ARRAY = [ - 1, - 23.4, - True, - False, - 'false', - TEST_STRING, - { - 'bumbleywump': 'cucumberpatch', - 'literal_boolean': 'true' - }, - ['sub', 'array', True] -] - -TEST_DICT = { - 'foo': { - 'nested_string': 'Bumbleywump Cucumberpatch', - 'nested_bool': True, - 'nested_null': None, - 'nested_int': 1, - 'nested_float': 3.14, - 'nested_boolstr': 'false', - 'hyphenated-key': 'look-at-all-these-hyphens!', - }, - 'bar': TEST_ARRAY, - 'baz': 'zoo' -} - - -def _get_variables_for(name): - return { - '{}_dict'.format(name): TEST_DICT, - '{}_array'.format(name): TEST_ARRAY, - '{}_string'.format(name): TEST_STRING, - } - - -class JSONPathResolvedSearchTestCase(functional.TestCase): - - def setUp(self): - super(JSONPathResolvedSearchTestCase, self).setUp() - self.cloud = self.create_cloud( - name='cloud1', 
- variables=_get_variables_for('cloud1'), - ) - self.region = self.create_region( - name='region1', - cloud=self.cloud, - variables=_get_variables_for('region1'), - ) - self.cell = self.create_cell( - name='cell1', - cloud=self.cloud, - region=self.region, - variables=_get_variables_for('cell1') - ) - self.switches = [] - for i in range(2): - name = 'netdev{}'.format(str(i)) - self.switches.append(self.create_network_device( - name=name, - cloud=self.cloud, - region=self.region, - cell=self.cell, - device_type='switch', - ip_address='192.168.{}.1'.format(i), - **_get_variables_for(name) - )) - - self.hosts = [] - for i in range(len(self.switches) * 3): - name = 'host{}'.format(i) - self.hosts.append(self.create_host( - name=name, - cloud=self.cloud, - region=self.region, - cell=self.cell, - hosttype='server', - ip_address='192.168.{}.2'.format(i), - parent_id=self.switches[i % len(self.switches)]['id'], - **_get_variables_for(name) - )) - - def test_jsonpath_search_device_parent(self): - url = self.url + '/v1/hosts' - queries = [ - 'netdev1_dict.foo."hyphenated-key":"look-at-all-these-hyphens!"', - ] - expected_names = ['host1', 'host3', 'host5'] - - resp = self.get(url, vars=','.join(queries)) - hosts = resp.json()['hosts'] - parent_ids = set([h['parent_id'] for h in hosts]) - - self.assertEqual(3, len(hosts)) - self.assertEqual(1, len(parent_ids)) - self.assertEqual(self.switches[1]['id'], parent_ids.pop()) - self.assertListEqual( - sorted(expected_names), - sorted([h['name'] for h in hosts]) - ) - - def test_jsonpath_search_device_parent_override(self): - url = self.url + '/v1/hosts' - queries = [ - 'netdev1_dict.foo."hyphenated-key":"look-at-all-these-hyphens!"', - ] - variables_put = { - 'netdev1_dict': { - 'foo': { - 'hyphenated-key': 'look-at-all-these-hyphens' - } - } - } - self.put('{}/{}/variables'.format(url, self.hosts[3]['id']), - data=variables_put) - resp = self.get(url, vars=','.join(queries)) - hosts = resp.json()['hosts'] - parent_ids = 
set([h['parent_id'] for h in hosts]) - - self.assertEqual(2, len(hosts)) - self.assertEqual(1, len(parent_ids)) - self.assertEqual(self.switches[1]['id'], parent_ids.pop()) - - def test_jsonpath_search_device_child_vars_included(self): - url = self.url + '/v1/hosts' - queries = [ - 'netdev1_dict.foo."hyphenated-key":"look-at-all-these-hyphens!"', - ] - modified_id = self.hosts[0]['id'] - variables_put = { - 'netdev1_dict': { - 'foo': { - 'hyphenated-key': 'look-at-all-these-hyphens!' - } - } - } - self.put('{}/{}/variables'.format(url, modified_id), - data=variables_put) - expected_names = ['host0', 'host1', 'host3', 'host5'] - - resp = self.get(url, vars=','.join(queries)) - hosts = resp.json()['hosts'] - - self.assertEqual(4, len(hosts)) - self.assertListEqual( - sorted(expected_names), - sorted([h['name'] for h in hosts]) - ) - - def test_jsonpath_search_device_conjunctive_parent_vars(self): - url = self.url + '/v1/hosts' - queries = [ - 'netdev1_dict.foo."hyphenated-key":"look-at-all-these-hyphens!"', - 'region1_array[2]:true', - 'cloud1_dict.bar[3]:false', - ] - resp = self.get(url, vars=','.join(queries)) - hosts = resp.json()['hosts'] - parent_ids = set([h['parent_id'] for h in hosts]) - - self.assertEqual(3, len(hosts)) - self.assertEqual(1, len(parent_ids)) - self.assertEqual(self.switches[1]['id'], parent_ids.pop()) - - -class JSONPathSearchTestCaseMixin(object): - - resource = '' - - def get_resource_url(self): - return '{}/v1/{}'.format(self.url, self.resource) - - def setup_projects(self, projects): - created = [] - for name, variables in projects: - created.append(self.create_project( - name=name, - variables=variables - )) - return created - - def setup_clouds(self, clouds): - created = [] - for name, variables in clouds: - created.append(self.create_cloud( - name=name, - variables=variables - )) - return created - - def setup_regions(self, regions): - created = [] - cloud = self.create_cloud(name='cloud1') - for name, variables in regions: - 
created.append(self.create_region( - name=name, - cloud=cloud, - variables=variables - )) - return created - - def setup_cells(self, cells): - created = [] - cloud = self.create_cloud(name='cloud1') - region = self.create_region( - name='region1', - cloud=cloud - ) - for name, variables in cells: - created.append(self.create_cell( - name=name, - cloud=cloud, - region=region, - variables=variables - )) - return created - - def setup_networks(self, networks): - created = [] - cloud = self.create_cloud(name='cloud1') - region = self.create_region( - name='region1', - cloud=cloud - ) - for name, variables in networks: - created.append(self.create_network( - name=name, - cloud=cloud, - region=region, - cidr='192.168.0.0/24', - gateway='192.168.0.1', - netmask='255.255.255.0', - variables=variables - )) - return created - - def setup_network_devices(self, network_devices): - created = [] - cloud = self.create_cloud(name='cloud1') - region = self.create_region( - name='region1', - cloud=cloud - ) - for name, variables in network_devices: - created.append(self.create_network_device( - name=name, - cloud=cloud, - region=region, - device_type='switch', - ip_address='192.168.0.1', - **variables - )) - return created - - def setup_hosts(self, hosts): - created = [] - cloud = self.create_cloud(name='cloud1') - region = self.create_region( - name='region1', - cloud=cloud - ) - for name, variables in hosts: - created.append(self.create_host( - name=name, - cloud=cloud, - region=region, - hosttype='server', - ip_address='192.168.0.1', - **variables - )) - return created - - def setup_resources(self, resources): - setup_fn = { - "projects": self.setup_projects, - "clouds": self.setup_clouds, - "regions": self.setup_regions, - "cells": self.setup_cells, - "networks": self.setup_networks, - "network-devices": self.setup_network_devices, - "hosts": self.setup_hosts, - } - return setup_fn[self.resource](resources) - - def resources_from_response(self, resp): - return 
resp.json()[self.resource.replace('-', '_')] - - def get_resources(self, **params): - headers = None - if self.resource in ('projects',): - headers = self.root_headers - resp = self.get(self.get_resource_url(), headers=headers, - details='all', **params) - return resp - - def test_jsonpath_search_nested_string(self): - resources = ( - ('resource1', {'foo': TEST_DICT}), - ('resource2', {'foo': {'baz': 'nope'}}) - ) - created = self.setup_resources(resources) - - found = self.resources_from_response(self.get_resources( - vars='foo.foo.nested_string:"Bumbleywump Cucumberpatch"')) - - self.assertEqual(1, len(found)) - self.assertEqual(created[0]['id'], found[0]['id']) - self.assertEqual(created[0]['variables'], found[0]['variables']) - - def test_jsonpath_search_nested_string_wildcard(self): - resources = ( - ('resource1', {'foo': TEST_DICT}), - ('resource2', {'foo': {"baz": "zoom"}}) - ) - created = self.setup_resources(resources) - - found = self.resources_from_response( - self.get_resources(vars='foo.*:"zoo"')) - - self.assertEqual(1, len(found)) - self.assertEqual(created[0]['id'], found[0]['id']) - self.assertEqual(created[0]['variables'], found[0]['variables']) - - def test_jsonpath_search_array_string(self): - resources = ( - ('resource1', {'foo': TEST_ARRAY}), - ('resource2', {'foo': TEST_ARRAY}), - ('resource3', {'foo': ["I'm just a string", 1, 2, 3, 4, 'foo']}), - ) - created = self.setup_resources(resources) - - found = self.resources_from_response( - self.get_resources(vars='foo[5]:"I\'m just a string"')) - - self.assertEqual(2, len(found)) - self.assertListEqual(sorted([c['id'] for c in created[:2]]), - sorted([f['id'] for f in found])) - - def test_jsonpath_search_array_string_wildcard(self): - resources = ( - ('resource1', {'foo': TEST_ARRAY}), - ('resource2', {'foo': TEST_ARRAY}), - ('resource3', {'foo': ["I'm just a string", True]}), - ('resource4', {'foo': ['Bumbleywump Cucumberpatch']}), - ) - created = self.setup_resources(resources) - - found = 
self.resources_from_response( - self.get_resources(vars='foo[*]:"I\'m just a string"')) - - self.assertEqual(3, len(found)) - self.assertListEqual(sorted([c['id'] for c in created[:3]]), - sorted([f['id'] for f in found])) - - def test_jsonpath_search_nested_array_string(self): - resources = ( - ('resource1', {'foo': TEST_DICT}), - ('resource2', {'foo': TEST_DICT}), - ('resource3', {'foo': {"bar": ["I'm just a string", True]}}), - ('resource4', {'foo': TEST_ARRAY}), - ) - created = self.setup_resources(resources) - - found = self.resources_from_response( - self.get_resources(vars='foo.bar[*]:"I\'m just a string"')) - - self.assertEqual(3, len(found)) - self.assertListEqual(sorted([c['id'] for c in created[:3]]), - sorted([f['id'] for f in found])) - - def test_jsonpath_search_nested_int(self): - resources = ( - ('resource1', {'foo': TEST_DICT}), - ('resource2', {'foo': {"foo": {"nested_int": "1"}}}) - ) - created = self.setup_resources(resources) - - found = self.resources_from_response( - self.get_resources(vars='foo.foo.nested_int:1')) - - self.assertEqual(1, len(found)) - self.assertEqual(created[0]['id'], found[0]['id']) - self.assertEqual(created[0]['variables'], found[0]['variables']) - - def test_jsonpath_search_nested_float(self): - resources = ( - ('resource1', {'foo': TEST_DICT}), - ('resource2', {'foo': {"foo": {"nested_float": 3}}}) - ) - created = self.setup_resources(resources) - - found = self.resources_from_response( - self.get_resources(vars='foo.foo.nested_float:3.14')) - - self.assertEqual(1, len(found)) - self.assertEqual(created[0]['id'], found[0]['id']) - self.assertEqual(created[0]['variables'], found[0]['variables']) - - def test_jsonpath_search_nested_bool(self): - resources = ( - ('resource1', {'foo': TEST_DICT}), - ('resource2', {'foo': {"foo": {"nested_bool": 'true'}}}) - ) - created = self.setup_resources(resources) - - found = self.resources_from_response( - self.get_resources(vars='foo.foo.nested_bool:true')) - - self.assertEqual(1, 
len(found)) - self.assertEqual(created[0]['id'], found[0]['id']) - self.assertEqual(created[0]['variables'], found[0]['variables']) - - def test_jsonpath_search_nested_boolstr(self): - resources = ( - ('resource1', {'foo': TEST_DICT}), - ('resource2', {'foo': {"foo": {"nested_boolstr": False}}}) - ) - created = self.setup_resources(resources) - - found = self.resources_from_response( - self.get_resources(vars='foo.foo.nested_boolstr:"false"')) - - self.assertEqual(1, len(found)) - self.assertEqual(created[0]['id'], found[0]['id']) - self.assertEqual(created[0]['variables'], found[0]['variables']) - - def test_jsonpath_search_nested_null(self): - resources = ( - ('resource1', {'foo': TEST_DICT}), - ('resource2', {'foo': {"foo": {"nested_null": 'test'}}}) - ) - created = self.setup_resources(resources) - - found = self.resources_from_response( - self.get_resources(vars='foo.foo.nested_null:null')) - - self.assertEqual(1, len(found)) - self.assertEqual(created[0]['id'], found[0]['id']) - self.assertEqual(created[0]['variables'], found[0]['variables']) - - def test_jsonpath_search_hyphenated(self): - resources = ( - ('resource1', {'foo': TEST_DICT}), - ('resource2', {'foo': {"foo": {"hyphenated-key": 'test-test'}}}) - ) - created = self.setup_resources(resources) - - found = self.resources_from_response(self.get_resources( - vars='foo.foo."hyphenated-key":"look-at-all-these-hyphens!"')) - - self.assertEqual(1, len(found)) - self.assertEqual(created[0]['id'], found[0]['id']) - self.assertEqual(created[0]['variables'], found[0]['variables']) - - def test_jsonpath_search_key_with_period(self): - resources = ( - ('resource1', {'v3.0': TEST_DICT}), - ('resource2', {'v3.0': {"foo": {"hyphenated-key": 'test-test'}}}) - ) - created = self.setup_resources(resources) - - found = self.resources_from_response(self.get_resources( - vars='"v3.0".foo."hyphenated-key":"look-at-all-these-hyphens!"')) - - self.assertEqual(1, len(found)) - self.assertEqual(created[0]['id'], 
found[0]['id']) - self.assertEqual(created[0]['variables'], found[0]['variables']) - - def test_jsonpath_search_non_string_member(self): - self.setup_resources(( - ('resource1', {'v3.0': TEST_DICT}), - )) - - resp = self.get_resources( - vars='v3.0.foo."hyphenated-key":"look-at-all-these-hyphens!"') - self.assertBadRequest(resp) - self.assertEqual(exceptions.InvalidJSONPath.msg, - resp.json()['message']) - - def test_jsonpath_search_hyphenated_without_quotes(self): - self.setup_resources(( - ('resource1', {'v3.0': TEST_DICT}), - )) - - resp = self.get_resources( - vars='foo.hyphenated-key:"look-at-all-these-hyphens!"') - self.assertBadRequest(resp) - self.assertEqual(exceptions.InvalidJSONPath.msg, - resp.json()['message']) - - def test_jsonpath_search_invalid_first_key(self): - self.setup_resources(( - ('resource1', {'v3.0': TEST_DICT}), - )) - - resp = self.get_resources(vars='[*]foo.bar:"string"') - self.assertBadRequest(resp) - self.assertEqual(exceptions.InvalidJSONPath.msg, - resp.json()['message']) - - def test_jsonpath_search_bad_json_string_value(self): - self.setup_resources(( - ('resource1', {'v3.0': TEST_DICT}), - )) - - resp = self.get_resources(vars='foo.bar:string') - self.assertBadRequest(resp) - self.assertEqual(exceptions.InvalidJSONValue.msg, - resp.json()['message']) - - -class ProjectsJSONPathSearchTestCase(functional.TestCase, - JSONPathSearchTestCaseMixin): - resource = 'projects' - - -class CloudsJSONPathSearchTestCase(functional.TestCase, - JSONPathSearchTestCaseMixin): - resource = 'clouds' - - -class RegionsJSONPathSearchTestCase(functional.TestCase, - JSONPathSearchTestCaseMixin): - resource = 'regions' - - -class CellsJSONPathSearchTestCase(functional.TestCase, - JSONPathSearchTestCaseMixin): - resource = 'cells' - - -class NetworksJSONPathSearchTestCase(functional.TestCase, - JSONPathSearchTestCaseMixin): - resource = 'networks' - - -class NetworkDevicesJSONPathSearchTestCase(functional.TestCase, - JSONPathSearchTestCaseMixin): - 
resource = 'network-devices' - - -class HostsJSONPathSearchTestCase(functional.TestCase, - JSONPathSearchTestCaseMixin): - resource = 'hosts' diff --git a/craton/tests/functional/test_network_calls.py b/craton/tests/functional/test_network_calls.py deleted file mode 100644 index 05f2d13..0000000 --- a/craton/tests/functional/test_network_calls.py +++ /dev/null @@ -1,162 +0,0 @@ -from craton.tests.functional import TestCase - - -class APIV1NetworkSchemaTest(TestCase): - - def setUp(self): - super(APIV1NetworkSchemaTest, self).setUp() - self.cloud = self.create_cloud(name='cloud-1') - self.region = self.create_region(name='region-1', cloud=self.cloud) - self.networks_url = self.url + '/v1/networks' - self.cidr = '192.168.0.0/24' - self.netmask = '255.255.255.0' - self.gateway = '192.168.0.1' - - def test_network_create_with_required_works(self): - payload = { - 'cloud_id': self.cloud['id'], - 'region_id': self.region['id'], - 'name': 'a', - 'cidr': self.cidr, - 'netmask': self.netmask, - 'gateway': self.gateway, - } - resp = self.post(self.networks_url, data=payload) - self.assertEqual(201, resp.status_code) - - network = resp.json() - self.assertEqual('a', network['name']) - self.assertEqual(self.cloud['id'], network['cloud_id']) - self.assertEqual(self.region['id'], network['region_id']) - self.assertEqual(self.cidr, network['cidr']) - self.assertEqual(self.gateway, network['gateway']) - self.assertEqual(self.netmask, network['netmask']) - - def test_network_create_without_region_id_fails(self): - payload = { - 'cloud_id': self.cloud['id'], - 'name': 'a', - 'cidr': self.cidr, - 'netmask': self.netmask, - 'gateway': self.gateway, - } - network = self.post(self.networks_url, data=payload) - self.assertEqual(400, network.status_code) - msg = ( - "The request included the following errors:\n" - "- 'region_id' is a required property" - ) - self.assertEqual(network.json()['message'], msg) - - def test_network_create_without_cloud_id_fails(self): - payload = { - 
'region_id': self.region['id'], - 'name': 'a', - 'cidr': self.cidr, - 'netmask': self.netmask, - 'gateway': self.gateway, - } - network = self.post(self.networks_url, data=payload) - self.assertEqual(400, network.status_code) - msg = ( - "The request included the following errors:\n" - "- 'cloud_id' is a required property" - ) - self.assertEqual(network.json()['message'], msg) - - def test_network_create_with_extra_id_property_fails(self): - payload = { - 'region_id': self.region['id'], - 'cloud_id': self.cloud['id'], - 'name': 'a', - 'cidr': self.cidr, - 'netmask': self.netmask, - 'gateway': self.gateway, - 'id': 3 - } - network = self.post(self.networks_url, data=payload) - self.assertEqual(400, network.status_code) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed ('id' was unexpected)" - ) - self.assertEqual(network.json()['message'], msg) - - def test_network_create_with_extra_created_at_property_fails(self): - payload = { - 'region_id': self.region['id'], - 'cloud_id': self.cloud['id'], - 'name': 'a', - 'cidr': self.cidr, - 'netmask': self.netmask, - 'gateway': self.gateway, - 'created_at': 'This should not work' - } - network = self.post(self.networks_url, data=payload) - self.assertEqual(400, network.status_code) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed ('created_at' was " - "unexpected)" - ) - self.assertEqual(network.json()['message'], msg) - - def test_network_create_with_extra_updated_at_property_fails(self): - payload = { - 'region_id': self.region['id'], - 'cloud_id': self.cloud['id'], - 'name': 'a', - 'cidr': self.cidr, - 'netmask': self.netmask, - 'gateway': self.gateway, - 'updated_at': 'This should not work' - } - network = self.post(self.networks_url, data=payload) - self.assertEqual(400, network.status_code) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed ('updated_at' was " - 
"unexpected)" - ) - self.assertEqual(network.json()['message'], msg) - - def test_network_create_missing_all_properties_fails(self): - url = self.url + '/v1/networks' - network = self.post(url, data={}) - self.assertEqual(400, network.status_code) - msg = ( - "The request included the following errors:\n" - "- 'cidr' is a required property\n" - "- 'cloud_id' is a required property\n" - "- 'gateway' is a required property\n" - "- 'name' is a required property\n" - "- 'netmask' is a required property\n" - "- 'region_id' is a required property" - ) - self.assertEqual(network.json()['message'], msg) - - def test_network_get_all_with_details(self): - payload = { - 'cloud_id': self.cloud['id'], - 'region_id': self.region['id'], - 'name': 'a', - 'cidr': self.cidr, - 'netmask': self.netmask, - 'gateway': self.gateway, - 'variables': {'a': 'b'}, - } - resp = self.post(self.networks_url, data=payload) - self.assertEqual(201, resp.status_code) - - payload['name'] = 'b' - resp = self.post(self.networks_url, data=payload) - self.assertEqual(201, resp.status_code) - - url = self.networks_url + '?details=all' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - networks = resp.json()['networks'] - - for network in networks: - self.assertTrue('variables' in network) - self.assertEqual({'a': 'b'}, network['variables']) diff --git a/craton/tests/functional/test_network_device_calls.py b/craton/tests/functional/test_network_device_calls.py deleted file mode 100644 index f5363b2..0000000 --- a/craton/tests/functional/test_network_device_calls.py +++ /dev/null @@ -1,115 +0,0 @@ -from craton.tests.functional import DeviceTestBase - - -class APIV1NetworkDeviceTest(DeviceTestBase): - - resource = 'network-devices' - - def test_create_with_parent_id(self): - parent = self.create_network_device( - name='test1', - cloud=self.cloud, - region=self.region, - device_type='switch', - ip_address='192.168.1.1', - ) - child = self.create_network_device( - name='test2', - 
cloud=self.cloud, - region=self.region, - device_type='switch', - ip_address='192.168.1.2', - parent_id=parent['id'], - ) - self.assertEqual(parent['id'], child['parent_id']) - - def test_update_with_parent_id(self): - parent = self.create_network_device( - name='test1', - cloud=self.cloud, - region=self.region, - device_type='switch', - ip_address='192.168.1.1', - ) - - child = self.create_network_device( - name='test2', - cloud=self.cloud, - region=self.region, - device_type='switch', - ip_address='192.168.1.2', - ) - self.assertIsNone(child['parent_id']) - - url = '{}/v1/network-devices/{}'.format(self.url, child['id']) - child_update_resp = self.put( - url, data={'parent_id': parent['id']} - ) - self.assertEqual(200, child_update_resp.status_code) - child_update = child_update_resp.json() - self.assertEqual(parent['id'], child_update['parent_id']) - - def test_update_with_parent_id_equal_id_fails(self): - network_device = self.create_network_device( - name='test1', - cloud=self.cloud, - region=self.region, - device_type='switch', - ip_address='192.168.1.1', - ) - - url = '{}/v1/network-devices/{}'.format(self.url, network_device['id']) - network_device_update_resp = self.put( - url, data={'parent_id': network_device['id']} - ) - self.assertEqual(400, network_device_update_resp.status_code) - - def test_update_with_parent_id_equal_descendant_id_fails(self): - parent = self.create_network_device( - name='test1', - cloud=self.cloud, - region=self.region, - device_type='switch', - ip_address='192.168.1.1', - ) - self.assertIsNone(parent['parent_id']) - - child = self.create_network_device( - name='test2', - cloud=self.cloud, - region=self.region, - device_type='switch', - ip_address='192.168.1.2', - parent_id=parent['id'], - ) - self.assertEqual(parent['id'], child['parent_id']) - - grandchild = self.create_network_device( - name='test3', - cloud=self.cloud, - region=self.region, - device_type='switch', - ip_address='192.168.1.3', - parent_id=child['id'], - ) - 
self.assertEqual(child['id'], grandchild['parent_id']) - - url = '{}/v1/network-devices/{}'.format(self.url, parent['id']) - parent_update_resp = self.put( - url, data={'parent_id': grandchild['id']} - ) - self.assertEqual(400, parent_update_resp.status_code) - - def test_network_device_create_missing_all_properties_fails(self): - url = self.url + '/v1/network-devices' - network_device = self.post(url, data={}) - self.assertEqual(400, network_device.status_code) - msg = ( - "The request included the following errors:\n" - "- 'cloud_id' is a required property\n" - "- 'device_type' is a required property\n" - "- 'ip_address' is a required property\n" - "- 'name' is a required property\n" - "- 'region_id' is a required property" - ) - self.assertEqual(network_device.json()['message'], msg) diff --git a/craton/tests/functional/test_network_interface_calls.py b/craton/tests/functional/test_network_interface_calls.py deleted file mode 100644 index 01e4134..0000000 --- a/craton/tests/functional/test_network_interface_calls.py +++ /dev/null @@ -1,70 +0,0 @@ -from craton.tests import functional - - -class APIv1NetworkInterfacesTest(functional.DeviceTestBase): - def setUp(self): - super(APIv1NetworkInterfacesTest, self).setUp() - self.interfaces_url = self.url + '/v1/network-interfaces' - - def test_associate_network_device_with_a_host(self): - host = self.create_host('host-0', 'server', '127.0.0.1') - - payload = { - 'name': 'lo', - 'ip_address': '127.0.0.1', - 'device_id': host['id'], - 'interface_type': 'loopback', - } - response = self.post(self.interfaces_url, data=payload) - self.assertSuccessCreated(response) - self.assertIn('Location', response.headers) - interface = response.json() - self.assertEqual( - '{}/{}'.format(self.interfaces_url, interface['id']), - response.headers['Location'] - ) - - def test_port_must_be_an_integer_on_create(self): - host = self.create_host('host-0', 'server', '127.0.0.1') - - payload = { - 'name': 'lo', - 'ip_address': '127.0.0.1', - 
'device_id': host['id'], - 'interface_type': 'loopback', - 'port': 'asdf', - } - response = self.post(self.interfaces_url, data=payload) - self.assertBadRequest(response) - - def test_port_must_be_an_integer_on_update(self): - host = self.create_host('host-0', 'server', '127.0.0.1') - - payload = { - 'name': 'lo', - 'ip_address': '127.0.0.1', - 'device_id': host['id'], - 'interface_type': 'loopback', - 'port': 80, - } - response = self.post(self.interfaces_url, data=payload) - self.assertSuccessCreated(response) - interface = response.json() - - url = self.interfaces_url + '/{}'.format(interface['id']) - payload = {'port': 'asdf'} - response = self.put(url, data=payload) - self.assertBadRequest(response) - - def test_network_interface_create_missing_all_properties_fails(self): - url = self.url + '/v1/network-interfaces' - network_interface = self.post(url, data={}) - self.assertEqual(400, network_interface.status_code) - msg = ( - "The request included the following errors:\n" - "- 'device_id' is a required property\n" - "- 'interface_type' is a required property\n" - "- 'ip_address' is a required property\n" - "- 'name' is a required property" - ) - self.assertEqual(network_interface.json()['message'], msg) diff --git a/craton/tests/functional/test_project_calls.py b/craton/tests/functional/test_project_calls.py deleted file mode 100644 index 38863d8..0000000 --- a/craton/tests/functional/test_project_calls.py +++ /dev/null @@ -1,129 +0,0 @@ -from craton.tests import functional -from craton.tests.functional.test_variable_calls import \ - APIV1ResourceWithVariablesTestCase - - -class TestPaginationOfProjects(functional.TestCase): - def setUp(self): - super(TestPaginationOfProjects, self).setUp() - self.projects = [ - self.create_project('project-{}'.format(i)) - for i in range(0, 61) - ] - - def test_lists_first_thirty_projects(self): - response = self.get(self.url + '/v1/projects', - headers=self.root_headers) - self.assertSuccessOk(response) - json = 
response.json() - self.assertIn('projects', json) - projects = json['projects'] - self.assertEqual(30, len(projects)) - - def test_lists_projects_with_the_same_name(self): - self.create_project('project-0') - - response = self.get(self.url + '/v1/projects', - name='project-0', - headers=self.root_headers) - self.assertSuccessOk(response) - projects = response.json()['projects'] - self.assertEqual(2, len(projects)) - - -class APIV1ProjectTest(APIV1ResourceWithVariablesTestCase): - - resource = 'projects' - - def test_project_create_with_variables(self): - variables = {'a': 'b'} - project_name = 'test' - project = self.create_project(project_name, variables=variables) - self.assertEqual(project_name, project['name']) - self.assertEqual(variables, project['variables']) - - def test_create_project_supports_vars_ops(self): - project = self.create_project('test', variables={'a': 'b'}) - self.assert_vars_get_expected(project['id'], {'a': 'b'}) - self.assert_vars_can_be_set(project['id']) - self.assert_vars_can_be_deleted(project['id']) - - def test_project_create_with_duplicate_name_works(self): - project_name = 'test' - self.create_project(project_name) - url = self.url + '/v1/projects' - payload = {'name': project_name} - project = self.post(url, headers=self.root_headers, data=payload) - self.assertEqual(201, project.status_code) - - def test_project_get_all_with_name_filter(self): - proj1 = 'test1' - proj2 = 'test2' - self.create_project(proj2) - for i in range(3): - self.create_project(proj1) - url = self.url + '/v1/projects?name={}'.format(proj1) - resp = self.get(url, headers=self.root_headers) - projects = resp.json()['projects'] - self.assertEqual(3, len(projects)) - for project in projects: - self.assertEqual(proj1, project['name']) - - def test_get_project_details(self): - project_name = 'test' - project_vars = {"who": "that"} - project = self.create_project(project_name, variables=project_vars) - url = self.url + '/v1/projects/{}'.format(project['id']) - 
project_with_detail = self.get(url, headers=self.root_headers) - self.assertEqual(project_name, project_with_detail.json()['name']) - self.assertEqual(project_vars, project_with_detail.json()['variables']) - - def test_project_delete(self): - project1 = self.create_project('test1') - url = self.url + '/v1/projects' - projects = self.get(url, headers=self.root_headers) - # NOTE(thomasem): Have to include the default project created by - # test setup. - self.assertEqual(2, len(projects.json()['projects'])) - - delurl = self.url + '/v1/projects/{}'.format(project1['id']) - self.delete(delurl, headers=self.root_headers) - - projects = self.get(url, headers=self.root_headers) - self.assertEqual(1, len(projects.json()['projects'])) - - def test_project_variables_update(self): - project_name = 'test' - project = self.create_project(project_name) - variables = {"bumbleywump": "cucumberpatch"} - - put_url = self.url + '/v1/projects/{}/variables'.format(project['id']) - resp = self.put(put_url, headers=self.root_headers, data=variables) - self.assertEqual(200, resp.status_code) - - get_url = self.url + '/v1/projects/{}'.format(project['id']) - project = self.get(get_url, headers=self.root_headers) - self.assertEqual(variables, project.json()['variables']) - - def test_project_variables_delete(self): - project_name = 'test' - delete_key = 'bumbleywump' - variables = { - delete_key: 'cucumberpatch' - } - expected_vars = {'foo': 'bar'} - variables.update(expected_vars) - - project = self.create_project(project_name, variables=variables) - self.assert_vars_get_expected(project['id'], variables) - self.assert_vars_can_be_deleted(project['id']) - - def test_project_create_missing_all_properties_fails(self): - url = self.url + '/v1/projects' - project = self.post(url, data={}) - self.assertEqual(400, project.status_code) - msg = ( - "The request included the following errors:\n" - "- 'name' is a required property" - ) - self.assertEqual(project.json()['message'], msg) diff --git 
a/craton/tests/functional/test_region_calls.py b/craton/tests/functional/test_region_calls.py deleted file mode 100644 index 76bc58a..0000000 --- a/craton/tests/functional/test_region_calls.py +++ /dev/null @@ -1,288 +0,0 @@ -import urllib.parse - -from craton.tests.functional import TestCase - - -class RegionTests(TestCase): - def setUp(self): - super(RegionTests, self).setUp() - self.cloud = self.create_cloud() - - def create_cloud(self): - return super(RegionTests, self).create_cloud( - name='cloud-1', - variables={'version': 'x'}, - ) - - def create_region(self, name, variables=None): - return super(RegionTests, self).create_region( - name=name, - cloud=self.cloud, - variables=variables - ) - - -class APIV1RegionTest(RegionTests): - """Test cases for /region calls. - One set of data for the test is generated by fake data generateion - script during test module setup. - """ - - def test_create_region_full_data(self): - # Test with full set of allowed parameters - values = {"name": "region-new", - "note": "This is region-new.", - "cloud_id": self.cloud['id'], - "variables": {"a": "b"}} - url = self.url + '/v1/regions' - resp = self.post(url, data=values) - self.assertEqual(201, resp.status_code) - self.assertIn('Location', resp.headers) - self.assertEqual( - resp.headers['Location'], - "{}/{}".format(url, resp.json()['id']) - ) - self.assertEqual(values['name'], resp.json()['name']) - - def test_create_region_without_variables(self): - values = {"name": "region-two", - "note": "This is region-two", - "cloud_id": self.cloud['id']} - url = self.url + '/v1/regions' - resp = self.post(url, data=values) - self.assertEqual(201, resp.status_code) - self.assertIn('Location', resp.headers) - self.assertEqual( - resp.headers['Location'], - "{}/{}".format(url, resp.json()['id']) - ) - self.assertEqual("region-two", resp.json()['name']) - - def test_create_region_with_no_name_fails(self): - values = {"note": "This is region one.", "cloud_id": self.cloud['id']} - url = 
self.url + '/v1/regions' - resp = self.post(url, data=values) - self.assertEqual(resp.status_code, 400) - err_msg = ( - "The request included the following errors:\n" - "- 'name' is a required property" - ) - self.assertEqual(resp.json()['message'], err_msg) - - def test_create_region_with_no_cloud_id_fails(self): - values = {"name": "I don't work at all, you know."} - url = self.url + '/v1/regions' - resp = self.post(url, data=values) - self.assertEqual(resp.status_code, 400) - err_msg = ( - "The request included the following errors:\n" - "- 'cloud_id' is a required property" - ) - self.assertEqual(resp.json()['message'], err_msg) - - def test_create_region_with_duplicate_name_fails(self): - self.create_region("ORD135") - - values = {"name": "ORD135", "cloud_id": self.cloud['id']} - url = self.url + '/v1/regions' - resp = self.post(url, data=values) - self.assertEqual(409, resp.status_code) - - def test_create_region_with_extra_id_property_fails(self): - values = {"name": "test", 'cloud_id': self.cloud['id'], "id": 101} - url = self.url + '/v1/regions' - resp = self.post(url, data=values) - self.assertEqual(resp.status_code, 400) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed ('id' was unexpected)" - ) - self.assertEqual(resp.json()['message'], msg) - - def test_create_region_with_extra_created_at_property_fails(self): - values = {"name": "test", 'cloud_id': self.cloud['id'], - "created_at": "some date"} - url = self.url + '/v1/regions' - resp = self.post(url, data=values) - self.assertEqual(resp.status_code, 400) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed " - "('created_at' was unexpected)" - ) - self.assertEqual(resp.json()['message'], msg) - - def test_create_region_with_extra_updated_at_property_fails(self): - values = {"name": "test", 'cloud_id': self.cloud['id'], - "updated_at": "some date"} - url = self.url + '/v1/regions' - resp = 
self.post(url, data=values) - self.assertEqual(resp.status_code, 400) - msg = ( - "The request included the following errors:\n" - "- Additional properties are not allowed " - "('updated_at' was unexpected)" - ) - self.assertEqual(resp.json()['message'], msg) - - def test_region_create_missing_all_properties_fails(self): - url = self.url + '/v1/regions' - region = self.post(url, data={}) - self.assertEqual(400, region.status_code) - msg = ( - "The request included the following errors:\n" - "- 'cloud_id' is a required property\n" - "- 'name' is a required property" - ) - self.assertEqual(region.json()['message'], msg) - - def test_regions_get_all(self): - self.create_region("ORD1") - self.create_region("ORD2") - url = self.url + '/v1/regions' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - self.assertEqual(2, len(resp.json())) - - def test_regions_get_all_with_details(self): - self.create_region('ORD1', variables={'a': 'b'}) - self.create_region('ORD2', variables={'c': 'd'}) - url = self.url + '/v1/regions?details=all' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - regions = resp.json()['regions'] - self.assertEqual(2, len(regions)) - for region in regions: - self.assertTrue('variables' in region) - for region in regions: - if region['name'] == 'ORD1': - self.assertEqual({'a': 'b', 'version': 'x'}, - region['variables']) - if region['name'] == 'ORD2': - self.assertEqual({'c': 'd', 'version': 'x'}, - region['variables']) - - def test_regions_get_all_with_name_filter(self): - self.create_region("ORD1") - self.create_region("ORD2") - url = self.url + '/v1/regions?name=ORD1' - resp = self.get(url) - self.assertEqual(200, resp.status_code) - regions = resp.json()['regions'] - self.assertEqual(1, len(regions)) - self.assertEqual('ORD1', regions[0]['name']) - - def test_regions_get_all_for_cloud(self): - for i in range(2): - self.create_region("ORD{}".format(str(i))) - url = self.url + 
'/v1/regions?cloud_id={}'.format(self.cloud['id']) - resp = self.get(url) - self.assertEqual(200, resp.status_code) - regions = resp.json()['regions'] - self.assertEqual(2, len(regions)) - self.assertEqual(['ORD0', 'ORD1'], [r['name'] for r in regions]) - - def test_region_with_non_existing_filters(self): - self.create_region("ORD1") - url = self.url + '/v1/regions?name=idontexist' - resp = self.get(url) - self.assertEqual(404, resp.status_code) - - def test_region_get_details_for_region(self): - regvars = {"a": "b", "one": "two"} - region = self.create_region("ORD1", variables=regvars) - url = self.url + '/v1/regions/{}'.format(region['id']) - resp = self.get(url) - region = resp.json() - self.assertEqual(region['name'], 'ORD1') - - def test_region_get_details_has_resolved_vars(self): - regvars = {"a": "b", "one": "two"} - region = self.create_region("ORD1", variables=regvars) - url = self.url + '/v1/regions/{}'.format(region['id']) - resp = self.get(url) - region = resp.json() - self.assertEqual(region['name'], 'ORD1') - expected = {"a": "b", "one": "two", "version": "x"} - self.assertEqual(expected, region['variables']) - - def test_region_get_details_with_unresolved_vars(self): - regvars = {"a": "b", "one": "two"} - region = self.create_region("ORD1", variables=regvars) - r_id = region['id'] - url = self.url + '/v1/regions/{}?resolved-values=false'.format(r_id) - resp = self.get(url) - region = resp.json() - self.assertEqual(region['name'], 'ORD1') - self.assertEqual(regvars, region['variables']) - - -class TestPagination(RegionTests): - - def setUp(self): - super(TestPagination, self).setUp() - self.regions = [self.create_region('region-{}'.format(i)) - for i in range(0, 61)] - self.addCleanup(self.delete_regions, self.regions) - - def test_list_first_thirty_regions(self): - url = self.url + '/v1/regions' - response = self.get(url) - self.assertSuccessOk(response) - json = response.json() - self.assertIn('regions', json) - self.assertEqual(30, 
len(json['regions'])) - self.assertListEqual([r['id'] for r in self.regions[:30]], - [r['id'] for r in json['regions']]) - - def test_get_returns_correct_next_link(self): - url = self.url + '/v1/regions' - thirtieth_region = self.regions[29] - response = self.get(url) - self.assertSuccessOk(response) - json = response.json() - self.assertIn('links', json) - for link_rel in json['links']: - if link_rel['rel'] == 'next': - break - else: - self.fail("No 'next' link was returned in response") - - parsed_next = urllib.parse.urlparse(link_rel['href']) - self.assertIn('marker={}'.format(thirtieth_region['id']), - parsed_next.query) - - def test_get_returns_correct_prev_link(self): - first_region = self.regions[0] - thirtieth_region = self.regions[29] - url = self.url + '/v1/regions?marker={}'.format(thirtieth_region['id']) - response = self.get(url) - self.assertSuccessOk(response) - json = response.json() - self.assertIn('links', json) - for link_rel in json['links']: - if link_rel['rel'] == 'prev': - break - else: - self.fail("No 'prev' link was returned in response") - - parsed_prev = urllib.parse.urlparse(link_rel['href']) - self.assertIn('marker={}'.format(first_region['id']), - parsed_prev.query) - - def test_follow_all_region_links(self): - url = self.url + '/v1/regions' - response = self.get(url) - self.assertSuccessOk(response) - json = response.json() - regions = json['regions'] - while regions: - for link in json['links']: - if link['rel'] == 'next': - break - else: - break - response = self.get(link['href']) - self.assertSuccessOk(response) - json = response.json() - regions = json['regions'] diff --git a/craton/tests/functional/test_user_calls.py b/craton/tests/functional/test_user_calls.py deleted file mode 100644 index 35c326d..0000000 --- a/craton/tests/functional/test_user_calls.py +++ /dev/null @@ -1,29 +0,0 @@ -from craton.tests import functional - - -class UserTests(functional.TestCase): - - def test_create_user(self): - project = 
self.create_project('test') - url = self.url + '/v1/users' - payload = {'username': 'testuser', 'project_id': project['id']} - user = self.post(url, data=payload) - self.assertEqual(201, user.status_code) - self.assertEqual(payload['username'], user.json()['username']) - self.assertEqual(payload['project_id'], user.json()['project_id']) - - def test_create_user_with_admin_priv(self): - project = self.create_project('test') - url = self.url + '/v1/users' - payload = {'username': 'testuser', 'project_id': project['id'], - 'is_admin': True} - user = self.post(url, headers=self.root_headers, data=payload) - self.assertEqual(201, user.status_code) - self.assertEqual(payload['username'], user.json()['username']) - self.assertEqual(payload['is_admin'], user.json()['is_admin']) - - def test_create_user_with_no_project_id_fails(self): - url = self.url + '/v1/users' - payload = {'username': 'testuser'} - user = self.post(url, headers=self.root_headers, data=payload) - self.assertEqual(400, user.status_code) diff --git a/craton/tests/functional/test_variable_calls.py b/craton/tests/functional/test_variable_calls.py deleted file mode 100644 index 4837e1a..0000000 --- a/craton/tests/functional/test_variable_calls.py +++ /dev/null @@ -1,57 +0,0 @@ -from craton.tests.functional import TestCase - - -class APIV1ResourceWithVariablesTestCase(TestCase): - """Base test case for resources that have variables mixed in""" - - resource = '' # Test classes that mix in should set - path = '/v1/{resource}/{resource_id}/variables' - - def get_vars_url(self, resource_id): - return self.url + self.path.format( - resource=self.resource, resource_id=resource_id) - - def get_current_vars(self, resource_id): - url = self.get_vars_url(resource_id) - response = self.get(url) - self.assertEqual(200, response.status_code) - return response.json()['variables'] - - def assert_vars_get_expected(self, resource_id, expected_vars): - self.assertEqual(expected_vars, self.get_current_vars(resource_id)) - - def 
assert_vars_can_be_set(self, resource_id): - """Asserts new vars can be added to the existing vars, if any""" - # track the expected current state of vars for this resource, - # verifying expectations - current_vars = self.get_current_vars(resource_id) - payload = {'string-key': 'string-value', 'num-key': 47, - 'bookean-key': False, 'none-key': None, - 'object-key': {'a': 1, 'b': 2}, - 'list-key': ['a', 'b', 1, 2, 3, True, None]} - - url = self.get_vars_url(resource_id) - response = self.put(url, data=payload) - current_vars.update(payload) - self.assertEqual(200, response.status_code) - self.assertEqual(current_vars, response.json()['variables']) - self.assertEqual(current_vars, self.get_current_vars(resource_id)) - - def assert_vars_can_be_deleted(self, resource_id): - """Asserts that new vars can be added, then deleted""" - # track the expected current state of vars for this resource, - # verifying expectations - current_vars = self.get_current_vars(resource_id) - - url = self.get_vars_url(resource_id) - added_vars = {'will-keep': 42, 'will-delete': 47} - response = self.put(url, data=added_vars) - current_vars.update(added_vars) - self.assertEqual(200, response.status_code) - self.assertEqual(current_vars, response.json()['variables']) - self.assertEqual(current_vars, self.get_current_vars(resource_id)) - - response = self.delete(url, body=['will-delete', 'non-existent-key']) - del current_vars['will-delete'] - self.assertEqual(204, response.status_code) - self.assertEqual(current_vars, self.get_current_vars(resource_id)) diff --git a/craton/tests/unit/__init__.py b/craton/tests/unit/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/craton/tests/unit/db/__init__.py b/craton/tests/unit/db/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/craton/tests/unit/db/base.py b/craton/tests/unit/db/base.py deleted file mode 100644 index 4d40e99..0000000 --- a/craton/tests/unit/db/base.py +++ /dev/null @@ -1,38 +0,0 @@ -import 
fixtures - -from craton.db.sqlalchemy import api as sa_api -from craton.db.sqlalchemy import models -from craton.tests import TestCase - - -_DB_SCHEMA = None - - -class Database(fixtures.Fixture): - def __init__(self): - self.engine = sa_api.get_engine() - self.engine.dispose() - conn = self.engine.connect() - self.setup_sqlite() - self._DB = "".join(line for line in conn.connection.iterdump()) - self.engine.dispose() - - def setup_sqlite(self): - # NOTE(sulo): there is no version here. We will be using - # Alembic in the near future to manage migrations. - models.Base.metadata.create_all(self.engine) - - def _setUp(self): - conn = self.engine.connect() - conn.connection.executescript(self._DB) - self.addCleanup(self.engine.dispose) - - -class DBTestCase(TestCase): - - def setUp(self): - super(DBTestCase, self).setUp() - global _DB_SCHEMA - if not _DB_SCHEMA: - _DB_SCHEMA = Database() - self.useFixture(_DB_SCHEMA) diff --git a/craton/tests/unit/db/sqlalchemy/__init__.py b/craton/tests/unit/db/sqlalchemy/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/craton/tests/unit/db/sqlalchemy/test_projects.py b/craton/tests/unit/db/sqlalchemy/test_projects.py deleted file mode 100644 index 97991c3..0000000 --- a/craton/tests/unit/db/sqlalchemy/test_projects.py +++ /dev/null @@ -1,15 +0,0 @@ -from craton.db.sqlalchemy import api as dbapi -from craton.tests.unit.db import base - - -class TestProjectsGetAll(base.DBTestCase): - - def test_link_params_dictionary(self): - _, links = dbapi.projects_get_all( - self.context, - filters={'name': None, 'id': None, - 'sort_keys': ['id', 'created_at'], 'sort_dir': 'asc'}, - pagination_params={'limit': 30, 'marker': None}, - ) - self.assertNotIn('name', links) - self.assertNotIn('id', links) diff --git a/craton/tests/unit/db/test_cells.py b/craton/tests/unit/db/test_cells.py deleted file mode 100644 index 9842cc1..0000000 --- a/craton/tests/unit/db/test_cells.py +++ /dev/null @@ -1,116 +0,0 @@ -import uuid - -from 
craton import exceptions -from craton.db import api as dbapi -from craton.tests.unit.db import base - -project_id1 = uuid.uuid4().hex -cloud_id1 = uuid.uuid4().hex - -cell1 = {'region_id': 1, 'project_id': project_id1, 'name': 'cell1', - "cloud_id": cloud_id1} -cell1_region2 = {'region_id': 2, 'project_id': project_id1, 'name': 'cell1', - "cloud_id": cloud_id1} -cell2 = {'region_id': 1, 'project_id': project_id1, 'name': 'cell2', - "cloud_id": cloud_id1} - -cells = (cell1, cell1_region2, cell2) -default_pagination = {'limit': 30, 'marker': None} - - -class CellsDBTestCase(base.DBTestCase): - - def test_cells_create(self): - try: - dbapi.cells_create(self.context, cell1) - except Exception: - self.fail("Cell create raised unexpected exception") - - def test_duplicate_cell_create_raises_409(self): - dbapi.cells_create(self.context, cell1) - self.assertRaises(exceptions.DuplicateCell, dbapi.cells_create, - self.context, cell1) - - def test_cells_get_all(self): - dbapi.cells_create(self.context, cell1) - filters = { - "region_id": cell1["region_id"], - } - res, _ = dbapi.cells_get_all(self.context, filters, default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['name'], 'cell1') - - def test_cells_get_all_filter_name(self): - for cell in cells: - dbapi.cells_create(self.context, cell) - setup_res, _ = dbapi.cells_get_all(self.context, {}, - default_pagination) - self.assertGreater(len(setup_res), 2) - - filters = { - "name": cell1["name"], - } - res, _ = dbapi.cells_get_all(self.context, filters, default_pagination) - self.assertEqual(len(res), 2) - for cell in res: - self.assertEqual(cell['name'], 'cell1') - - def test_cells_get_all_filter_id(self): - for cell in cells: - dbapi.cells_create(self.context, cell) - setup_res, _ = dbapi.cells_get_all(self.context, {}, - default_pagination) - self.assertGreater(len(setup_res), 2) - self.assertEqual( - len([cell for cell in setup_res if cell['id'] == 1]), 1 - ) - - filters = { - "id": 1, - } - res, _ 
= dbapi.cells_get_all(self.context, filters, default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['id'], 1) - - def test_cells_get_all_with_filters(self): - res = dbapi.cells_create(self.context, cell1) - variables = {"key1": "value1", "key2": "value2"} - dbapi.variables_update_by_resource_id( - self.context, "cells", res.id, variables - ) - filters = { - "vars": "key2:value2", - "region_id": cell1["region_id"], - } - res, _ = dbapi.cells_get_all(self.context, filters, default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['name'], 'cell1') - - def test_cells_get_all_with_filters_noexist(self): - res = dbapi.cells_create(self.context, cell1) - variables = {"key1": "value1", "key2": "value2"} - dbapi.variables_update_by_resource_id( - self.context, "cells", res.id, variables - ) - filters = {} - filters["vars"] = "key2:value5" - res, _ = dbapi.cells_get_all(self.context, filters, default_pagination) - self.assertEqual(len(res), 0) - - def test_cell_delete(self): - create_res = dbapi.cells_create(self.context, cell1) - # First make sure we have the cell - res = dbapi.cells_get_by_id(self.context, create_res.id) - self.assertEqual(res.name, 'cell1') - - dbapi.cells_delete(self.context, res.id) - self.assertRaises(exceptions.NotFound, dbapi.cells_get_by_id, - self.context, res.id) - - def test_cell_update(self): - create_res = dbapi.cells_create(self.context, cell1) - res = dbapi.cells_get_by_id(self.context, create_res.id) - self.assertEqual(res.name, 'cell1') - new_name = 'cell1_New' - res = dbapi.cells_update(self.context, res.id, {'name': 'cell1_New'}) - self.assertEqual(res.name, new_name) diff --git a/craton/tests/unit/db/test_clouds.py b/craton/tests/unit/db/test_clouds.py deleted file mode 100644 index 63f30b0..0000000 --- a/craton/tests/unit/db/test_clouds.py +++ /dev/null @@ -1,89 +0,0 @@ -import uuid - -from craton.db import api as dbapi -from craton.tests.unit.db import base -from craton import exceptions - 
-default_pagination = {'limit': 30, 'marker': None} - -project_id1 = uuid.uuid4().hex -cloud1 = {'project_id': project_id1, 'name': 'cloud1'} - - -class CloudsDBTestCase(base.DBTestCase): - - def test_cloud_create(self): - try: - dbapi.clouds_create(self.context, cloud1) - except Exception: - self.fail("Cloud create raised unexpected exception") - - def test_cloud_create_duplicate_name_raises(self): - dbapi.clouds_create(self.context, cloud1) - self.assertRaises(exceptions.DuplicateCloud, dbapi.clouds_create, - self.context, cloud1) - - def test_clouds_get_all(self): - dbapi.clouds_create(self.context, cloud1) - filters = {} - res, _ = dbapi.clouds_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['name'], 'cloud1') - - def test_clouds_get_all_with_var_filters(self): - res = dbapi.clouds_create(self.context, cloud1) - variables = {"key1": "value1", "key2": "value2"} - dbapi.variables_update_by_resource_id( - self.context, "clouds", res.id, variables - ) - filters = {} - filters["vars"] = "key1:value1" - clouds, _ = dbapi.clouds_get_all( - self.context, filters, default_pagination, - ) - self.assertEqual(len(clouds), 1) - self.assertEqual(clouds[0].name, cloud1['name']) - - def test_clouds_get_all_with_var_filters_noexist(self): - res = dbapi.clouds_create(self.context, cloud1) - variables = {"key1": "value1", "key2": "value2"} - dbapi.variables_update_by_resource_id( - self.context, "clouds", res.id, variables - ) - filters = {} - filters["vars"] = "key1:value12" - clouds, _ = dbapi.clouds_get_all( - self.context, filters, default_pagination, - ) - self.assertEqual(len(clouds), 0) - - def test_cloud_get_by_name(self): - dbapi.clouds_create(self.context, cloud1) - res = dbapi.clouds_get_by_name(self.context, cloud1['name']) - self.assertEqual(res.name, 'cloud1') - - def test_cloud_get_by_id(self): - dbapi.clouds_create(self.context, cloud1) - res = dbapi.clouds_get_by_id(self.context, 1) - 
self.assertEqual(res.name, 'cloud1') - - def test_cloud_update(self): - dbapi.clouds_create(self.context, cloud1) - res = dbapi.clouds_get_by_id(self.context, 1) - self.assertEqual(res.name, 'cloud1') - new_name = "cloud_New1" - res = dbapi.clouds_update(self.context, res.id, - {'name': 'cloud_New1'}) - self.assertEqual(res.name, new_name) - - def test_cloud_delete(self): - dbapi.clouds_create(self.context, cloud1) - # First make sure we have the cloud - res = dbapi.clouds_get_by_name(self.context, cloud1['name']) - self.assertEqual(res.name, 'cloud1') - - dbapi.clouds_delete(self.context, res.id) - self.assertRaises(exceptions.NotFound, - dbapi.clouds_get_by_name, - self.context, 'fake-cloud') diff --git a/craton/tests/unit/db/test_devices.py b/craton/tests/unit/db/test_devices.py deleted file mode 100644 index bcd9b0a..0000000 --- a/craton/tests/unit/db/test_devices.py +++ /dev/null @@ -1,704 +0,0 @@ -import uuid - -from netaddr import IPAddress - -from craton import exceptions -from craton.db import api as dbapi -from craton.tests.unit.db import base - -default_pagination = {'limit': 30, 'marker': None} - - -class BaseDevicesDBTestCase(base.DBTestCase): - - mock_project_id = uuid.uuid4().hex - - def make_project(self, name, **variables): - project = dbapi.projects_create( - self.context, - {"name": name, - "variables": variables}) - return str(project.id) - - def make_cloud(self, project_id, name, **variables): - cloud = dbapi.clouds_create( - self.context, - {'name': name, - 'project_id': project_id, - 'variables': variables}) - return cloud.id - - def make_region(self, project_id, cloud_id, name, **variables): - region = dbapi.regions_create( - self.context, - {'name': name, - 'project_id': project_id, - 'cloud_id': cloud_id, - 'variables': variables}) - return region.id - - def make_cell(self, project_id, cloud_id, region_id, name, **variables): - cell = dbapi.cells_create( - self.context, - {'name': name, - 'project_id': project_id, - 'cloud_id': cloud_id, - 
'region_id': region_id, - 'variables': variables}) - return cell.id - - def make_host(self, project_id, cloud_id, region_id, name, ip_address, - host_type, cell_id=None, parent_id=None, labels=None, - **variables): - if cell_id: - host = {'name': name, - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'cell_id': cell_id, - 'ip_address': ip_address, - 'parent_id': parent_id, - 'device_type': host_type, - 'active': True, - 'labels': set() if labels is None else labels, - 'variables': variables} - else: - host = {'name': name, - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'ip_address': ip_address, - 'parent_id': parent_id, - 'device_type': host_type, - 'active': True, - 'labels': set() if labels is None else labels, - 'variables': variables} - - host = dbapi.hosts_create(self.context, host) - return host.id - - def make_network_device( - self, project_id, cloud_id, region_id, name, ip_address, - device_type, cell_id=None, parent_id=None, **variables - ): - network_device_data = { - 'name': name, - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'cell_id': cell_id, - 'ip_address': ip_address, - 'parent_id': parent_id, - 'device_type': device_type, - 'active': True, - 'variables': variables, - } - - network_device = dbapi.network_devices_create( - self.context, network_device_data - ) - return network_device.id - - -class DevicesDBTestCase(BaseDevicesDBTestCase): - - def setUp(self): - super().setUp() - project_id = self.make_project('project_1') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region(project_id, cloud_id, 'region_1') - net_device1_id = self.make_network_device( - project_id, cloud_id, region_id, 'switch1.example.com', - IPAddress('10.1.2.101'), 'switch' - ) - net_device2_id = self.make_network_device( - project_id, cloud_id, region_id, 'switch2.example.com', - IPAddress('10.1.2.102'), 'switch', parent_id=net_device1_id - ) - host1_id 
= self.make_host( - project_id, cloud_id, region_id, 'www1.example.com', - IPAddress(u'10.1.2.103'), 'server', parent_id=net_device2_id - ) - host2_id = self.make_host( - project_id, cloud_id, region_id, 'www2.example.com', - IPAddress(u'10.1.2.104'), 'container', parent_id=host1_id - ) - host3_id = self.make_host( - project_id, cloud_id, region_id, 'www3.example.com', - IPAddress(u'10.1.2.105'), 'server' - ) - - self.parent = net_device1_id - self.children = [net_device2_id] - self.descendants = [net_device2_id, host1_id, host2_id] - self.all = [ - net_device1_id, net_device2_id, host1_id, host2_id, host3_id - ] - - def test_devices_get_all(self): - devices, _ = dbapi.devices_get_all( - self.context, {}, default_pagination - ) - - self.assertEqual(self.all, [device.id for device in devices]) - - def test_devices_get_all_children(self): - devices, _ = dbapi.devices_get_all( - self.context, {'parent_id': self.parent}, default_pagination - ) - - self.assertEqual(self.children, [device.id for device in devices]) - - def test_devices_get_all_descendants(self): - devices, _ = dbapi.devices_get_all( - self.context, - {'parent_id': self.parent, 'descendants': True}, - default_pagination - ) - - self.assertEqual(self.descendants, [device.id for device in devices]) - - -class HostsDBTestCase(BaseDevicesDBTestCase): - - def make_very_small_cloud(self, with_cell=False): - project_id = self.make_project('project_1', foo='P1', zoo='P2', - boo='P3') - cloud_id = self.make_cloud(project_id, 'cloud_1', zoo='CL1') - region_id = self.make_region( - project_id, - cloud_id, - 'region_1', - foo='R1', bar='R2', bax='R3') - if with_cell: - cell_id = self.make_cell(project_id, cloud_id, region_id, 'cell_1', - bar='C2') - else: - cell_id = None - host_id = self.make_host(project_id, cloud_id, region_id, - 'www1.example.com', - IPAddress(u'10.1.2.101'), 'server', - cell_id=cell_id, foo='H1', baz='H3') - return project_id, cloud_id, region_id, cell_id, host_id - - def 
test_hosts_create(self): - # Need to do this query despite creation above because other - # elements (cell, region) were in separate committed sessions - # when the host was created. Verify these linked elements load - # correctly - project_id, cloud_id, region_id, cell_id, host_id = \ - self.make_very_small_cloud(with_cell=True) - host = dbapi.hosts_get_by_id(self.context, host_id) - self.assertEqual(host.region.id, region_id) - self.assertEqual(host.region.name, 'region_1') - self.assertEqual(host.cell.id, cell_id) - self.assertEqual(host.cell.name, 'cell_1') - - # Verify resolved variables/blames override properly - self.assertEqual( - [obj.id for obj in host.resolution_order], - [host_id, cell_id, region_id, cloud_id, uuid.UUID(project_id)]) - - self.assertEqual( - [variables for variables in host.resolution_order_variables], - [{'foo': 'H1', 'baz': 'H3'}, - {'bar': 'C2'}, - {'foo': 'R1', 'bar': 'R2', 'bax': 'R3'}, - {'zoo': 'CL1'}, - {'foo': 'P1', 'zoo': 'P2', 'boo': 'P3'}]) - - self.assertEqual( - host.resolved, - {'foo': 'H1', 'bar': 'C2', 'baz': 'H3', 'bax': 'R3', 'zoo': 'CL1', - 'boo': 'P3'}) - - blame = host.blame(['foo', 'bar', 'zoo', 'boo']) - self.assertEqual(blame['foo'].source.name, 'www1.example.com') - self.assertEqual(blame['foo'].variable.value, 'H1') - self.assertEqual(blame['bar'].source.name, 'cell_1') - self.assertEqual(blame['bar'].variable.value, 'C2') - self.assertEqual(blame['zoo'].source.name, 'cloud_1') - self.assertEqual(blame['zoo'].variable.value, 'CL1') - self.assertEqual(blame['boo'].source.name, 'project_1') - self.assertEqual(blame['boo'].variable.value, 'P3') - - def test_hosts_create_duplicate_raises(self): - cloud_id = self.make_cloud(self.mock_project_id, 'cloud_1') - region_id = self.make_region(self.mock_project_id, cloud_id, - 'region_1') - self.make_host(self.mock_project_id, cloud_id, region_id, - 'www1.example.com', - IPAddress(u'10.1.2.101'), 'server') - new_host = {'name': 'www1.example.com', 'region_id': region_id, - 
'ip_address': IPAddress(u'10.1.2.101'), - 'device_type': 'server', - 'cloud_id': cloud_id, 'project_id': self.mock_project_id} - self.assertRaises(exceptions.DuplicateDevice, dbapi.hosts_create, - self.context, new_host) - - def test_hosts_create_without_cell(self): - project_id, cloud_id, region_id, _, host_id = \ - self.make_very_small_cloud() - host = dbapi.hosts_get_by_id(self.context, host_id) - self.assertEqual(host.cloud_id, cloud_id) - self.assertEqual(host.region.id, region_id) - self.assertEqual(host.region.name, 'region_1') - self.assertIsNone(host.cell) - - # Verify resolved variables/blames override properly - self.assertEqual( - [obj.id for obj in host.resolution_order], - [host_id, region_id, cloud_id, uuid.UUID(project_id)]) - - self.assertEqual( - [variables for variables in host.resolution_order_variables], - [{'foo': 'H1', 'baz': 'H3'}, - {'foo': 'R1', 'bar': 'R2', 'bax': 'R3'}, - {'zoo': 'CL1'}, - {'foo': 'P1', 'zoo': 'P2', 'boo': 'P3'}]) - - self.assertEqual( - host.resolved, - {'foo': 'H1', 'bar': 'R2', 'baz': 'H3', 'bax': 'R3', 'zoo': 'CL1', - 'boo': 'P3'}) - - blame = host.blame(['foo', 'bar', 'zoo', 'boo']) - self.assertEqual(blame['foo'].source.name, 'www1.example.com') - self.assertEqual(blame['foo'].variable.value, 'H1') - self.assertEqual(blame['bar'].source.name, 'region_1') - self.assertEqual(blame['bar'].variable.value, 'R2') - self.assertEqual(blame['zoo'].source.name, 'cloud_1') - self.assertEqual(blame['zoo'].variable.value, 'CL1') - self.assertEqual(blame['boo'].source.name, 'project_1') - self.assertEqual(blame['boo'].variable.value, 'P3') - - def test_hosts_update(self): - cloud_id = self.make_cloud(self.mock_project_id, 'cloud_1') - region_id = self.make_region(self.mock_project_id, cloud_id, - 'region_1') - host_id = self.make_host(self.mock_project_id, cloud_id, region_id, - 'example', - IPAddress(u'10.1.2.101'), 'server', - bar='bar2') - name = "Host_New" - res = dbapi.hosts_update(self.context, host_id, {'name': 
'Host_New'}) - self.assertEqual(res.name, name) - - def test_hosts_variable_resolved_with_parent(self): - project_id = self.make_project( - 'project_1', - foo='P1', zoo='P2', boo='P3') - cloud_id = self.make_cloud( - project_id, - 'cloud_1', - zoo='CL1', zab='CL2') - region_id = self.make_region( - project_id, - cloud_id, - 'region_1', - foo='R1', bar='R2', bax='R3') - cell_id = self.make_cell(project_id, cloud_id, region_id, 'cell_1', - bar='C2') - host1_id = self.make_host(project_id, cloud_id, region_id, - 'www1.example.com', - IPAddress(u'10.1.2.101'), 'server', - cell_id=cell_id, foo='H1', baz='H3') - host2_id = self.make_host(project_id, cloud_id, region_id, - 'www1.example2.com', - IPAddress(u'10.1.2.102'), 'server', - cell_id=cell_id, parent_id=host1_id) - host2 = dbapi.hosts_get_by_id(self.context, host2_id) - - # Verify resolved variables/blames override properly - self.assertEqual( - [obj.id for obj in host2.resolution_order], - [host2_id, host1_id, cell_id, region_id, cloud_id, - uuid.UUID(project_id)]) - - self.assertEqual( - [variables for variables in host2.resolution_order_variables], - [{}, - {'baz': 'H3', 'foo': 'H1'}, - {'bar': 'C2'}, - {'bar': 'R2', 'foo': 'R1', 'bax': 'R3'}, - {'zoo': 'CL1', 'zab': 'CL2'}, - {'foo': 'P1', 'zoo': 'P2', 'boo': 'P3'}]) - - self.assertEqual( - host2.resolved, - {'foo': 'H1', 'bar': 'C2', 'baz': 'H3', 'bax': 'R3', 'zoo': 'CL1', - 'boo': 'P3', 'zab': 'CL2'}) - - blame = host2.blame(['foo', 'bar', 'zoo', 'boo', 'zab']) - self.assertEqual(blame['foo'].source.name, 'www1.example.com') - self.assertEqual(blame['foo'].variable.value, 'H1') - self.assertEqual(blame['bar'].source.name, 'cell_1') - self.assertEqual(blame['bar'].variable.value, 'C2') - self.assertEqual(blame['zoo'].source.name, 'cloud_1') - self.assertEqual(blame['zoo'].variable.value, 'CL1') - self.assertEqual(blame['zab'].source.name, 'cloud_1') - self.assertEqual(blame['zab'].variable.value, 'CL2') - self.assertEqual(blame['boo'].source.name, 'project_1') 
- self.assertEqual(blame['boo'].variable.value, 'P3') - - def test_hosts_variables_no_resolved(self): - project_id = self.make_project('project_1', zoo='P2') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region(project_id, cloud_id, 'region_1', - foo='R1') - host_id = self.make_host(project_id, cloud_id, region_id, - 'www.example.xyz', - IPAddress(u'10.1.2.101'), - 'server', bar='bar2') - host = dbapi.hosts_get_by_id(self.context, host_id) - self.assertEqual(host.name, 'www.example.xyz') - self.assertEqual(host.variables, {'bar': 'bar2'}) - - def test_hosts_resolved_vars_no_cells(self): - project_id = self.make_project('project_1') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region(project_id, cloud_id, 'region_1', - foo='R1') - host_id = self.make_host(project_id, cloud_id, region_id, - 'www.example.xyz', - IPAddress(u'10.1.2.101'), - 'server', bar='bar2') - host = dbapi.hosts_get_by_id(self.context, host_id) - self.assertEqual(host.name, 'www.example.xyz') - self.assertEqual(host.resolved, {'bar': 'bar2', 'foo': 'R1'}) - - def test_host_labels_create(self): - cloud_id = self.make_cloud(self.mock_project_id, 'cloud_1') - region_id = self.make_region(self.mock_project_id, cloud_id, - 'region_1', - foo='R1') - host_id = self.make_host(self.mock_project_id, cloud_id, region_id, - 'www.example.xyz', - IPAddress(u'10.1.2.101'), - 'server', bar='bar2') - labels = {"labels": ["tom", "jerry"]} - dbapi.hosts_labels_update(self.context, host_id, labels) - - def test_host_labels_delete(self): - cloud_id = self.make_cloud(self.mock_project_id, 'cloud_1') - region_id = self.make_region(self.mock_project_id, cloud_id, - 'region_1', - foo='R1') - host_id = self.make_host(self.mock_project_id, cloud_id, region_id, - 'www.example.xyz', - IPAddress(u'10.1.2.101'), - 'server', bar='bar2') - _labels = {"labels": ["tom", "jerry", "jones"]} - dbapi.hosts_labels_update(self.context, host_id, _labels) - host = 
dbapi.hosts_get_by_id(self.context, host_id) - self.assertEqual(sorted(host.labels), sorted(_labels["labels"])) - _dlabels = {"labels": ["tom"]} - dbapi.hosts_labels_delete(self.context, host_id, _dlabels) - host = dbapi.hosts_get_by_id(self.context, host_id) - self.assertEqual(host.labels, {"jerry", "jones"}) - - def test_hosts_get_all_with_label_filters(self): - cloud_id = self.make_cloud(self.mock_project_id, 'cloud_1') - region_id = self.make_region(self.mock_project_id, cloud_id, - 'region_1') - labels = {"labels": ["compute"]} - host1 = self.make_host( - self.mock_project_id, - cloud_id, - region_id, - 'www1.example.com', - IPAddress(u'10.1.2.101'), - 'server', - ) - dbapi.hosts_labels_update(self.context, host1, labels) - - self.make_host( - self.mock_project_id, - cloud_id, - region_id, - 'www1.example2.com', - IPAddress(u'10.1.2.102'), - 'server', - ) - res, _ = dbapi.hosts_get_all(self.context, {"label": "compute"}, - default_pagination) - - self.assertEqual(len(res), 1) - self.assertEqual(res[0].name, 'www1.example.com') - - def test_hosts_get_all_with_filter_cell_id(self): - project_id = self.make_project('project_1', foo='P1', zoo='P2') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region(project_id, cloud_id, 'region_1', - foo='R1') - cell_id1 = self.make_cell(project_id, cloud_id, region_id, 'cell_1', - bar='C2') - cell_id2 = self.make_cell(project_id, cloud_id, region_id, 'cell_2', - bar='C2') - self.assertNotEqual(cell_id1, cell_id2) - - self.make_host( - project_id, - cloud_id, - region_id, - 'www.example.xyz', - IPAddress(u'10.1.2.101'), - 'server', - cell_id=cell_id1, - ) - self.make_host( - project_id, - cloud_id, - region_id, - 'www.example.abc', - IPAddress(u'10.1.2.102'), - 'server', - cell_id=cell_id2, - ) - - all_res, _ = dbapi.hosts_get_all(self.context, {}, default_pagination) - self.assertEqual(len(all_res), 2) - self.assertEqual( - len([host for host in all_res if host['cell_id'] == cell_id1]), 1 - ) - - 
filters = { - "cell_id": cell_id1, - } - res, _ = dbapi.hosts_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0].name, 'www.example.xyz') - - def test_hosts_get_all_with_filters(self): - project_id = self.make_project('project_1', foo='P1', zoo='P2') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region(project_id, cloud_id, 'region_1', - foo='R1') - host_id = self.make_host(project_id, cloud_id, region_id, - 'www.example.xyz', - IPAddress(u'10.1.2.101'), - 'server') - variables = {"key1": "value1", "key2": "value2"} - dbapi.variables_update_by_resource_id( - self.context, "hosts", host_id, variables - ) - filters = { - "region_id": region_id, - "vars": "key2:value2", - } - res, _ = dbapi.hosts_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0].name, 'www.example.xyz') - - def test_hosts_get_with_key_value_filters(self): - project_id = self.make_project('project_1', foo='P1', zoo='P2') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region(project_id, cloud_id, 'region_1', - foo='R1') - host1 = self.make_host(project_id, cloud_id, region_id, - 'www.example.xyz', - IPAddress(u'10.1.2.101'), - 'server') - variables = {"key1": "example1", "key2": "Tom"} - dbapi.variables_update_by_resource_id( - self.context, "hosts", host1, variables - ) - # Second host with own variables - host2 = self.make_host(project_id, cloud_id, region_id, - 'www.example2.xyz', - IPAddress(u'10.1.2.102'), - 'server') - variables = {"key1": "example2", "key2": "Tom"} - dbapi.variables_update_by_resource_id( - self.context, "hosts", host2, variables - ) - filters = {"vars": "key1:example2"} - - res, _ = dbapi.hosts_get_all(self.context, filters, default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual('www.example2.xyz', res[0].name) - - filters = {"vars": "key2:Tom"} - res, _ = 
dbapi.hosts_get_all(self.context, filters, default_pagination) - self.assertEqual(len(res), 2) - - def test_hosts_get_all_with_filters_noexist(self): - project_id = self.make_project('project_1', foo='P1', zoo='P2') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region(project_id, cloud_id, 'region_1', - foo='R1') - host_id = self.make_host(project_id, cloud_id, region_id, - 'www.example.xyz', - IPAddress(u'10.1.2.101'), - 'server') - variables = {"key1": "value1", "key2": "value2"} - dbapi.variables_update_by_resource_id( - self.context, "hosts", host_id, variables - ) - filters = { - "region_id": 1, - "vars": "key1:value5", - } - res, _ = dbapi.hosts_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 0) - - def test_hosts_create_sets_parent_id(self): - project_id = self.make_project('project_1') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region(project_id, cloud_id, 'region_1') - parent_id = self.make_host( - project_id, cloud_id, region_id, '1.www.example.com', - IPAddress(u'10.1.2.101'), 'server' - ) - child = dbapi.hosts_create( - self.context, - { - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'hostname': '2.www.example.com', - 'ip_address': IPAddress(u'10.1.2.102'), - 'device_type': 'server', - 'parent_id': parent_id, - } - ) - self.assertEqual(parent_id, child.parent_id) - - def test_hosts_update_sets_parent_id(self): - project_id = self.make_project('project_1') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region(project_id, cloud_id, 'region_1') - parent_id = self.make_host( - project_id, cloud_id, region_id, '1.www.example.com', - IPAddress(u'10.1.2.101'), 'server' - ) - child = dbapi.hosts_create( - self.context, - { - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'hostname': '2.www.example.com', - 'ip_address': IPAddress(u'10.1.2.102'), - 'device_type': 'server', - 
'parent_id': None, - } - ) - self.assertIsNone(child.parent_id) - child_update = dbapi.hosts_update( - self.context, - child.id, - { - 'parent_id': parent_id, - } - ) - self.assertEqual(parent_id, child_update.parent_id) - - def test_hosts_update_fails_when_parent_id_set_to_own_id(self): - project_id = self.make_project('project_1') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region(project_id, cloud_id, 'region_1') - host1 = dbapi.hosts_create( - self.context, - { - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'hostname': '1.www.example.com', - 'ip_address': IPAddress(u'10.1.2.101'), - 'device_type': 'server', - 'parent_id': None, - } - ) - self.assertRaises( - exceptions.BadRequest, - dbapi.hosts_update, - self.context, - host1.id, - { - 'parent_id': host1.id, - } - ) - - def test_hosts_update_fails_when_parent_set_to_descendant(self): - project_id = self.make_project('project_1') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region(project_id, cloud_id, 'region_1') - parent = dbapi.hosts_create( - self.context, - { - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'hostname': '1.www.example.com', - 'ip_address': IPAddress(u'10.1.2.101'), - 'device_type': 'server', - 'parent_id': None, - } - ) - child = dbapi.hosts_create( - self.context, - { - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'hostname': '2.www.example.com', - 'ip_address': IPAddress(u'10.1.2.102'), - 'device_type': 'server', - 'parent_id': parent.id, - } - ) - grandchild = dbapi.hosts_create( - self.context, - { - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'hostname': '3.www.example.com', - 'ip_address': IPAddress(u'10.1.2.103'), - 'device_type': 'server', - 'parent_id': child.id, - } - ) - self.assertRaises( - exceptions.BadRequest, - dbapi.hosts_update, - self.context, - parent.id, - { - 'parent_id': 
grandchild.id, - } - ) - - def test_hosts_get_all_with_resolved_var_filters(self): - project_id = self.make_project('project_1', foo='P1', zoo='P2') - cloud_id = self.make_cloud(project_id, 'cloud_1') - region_id = self.make_region( - project_id, cloud_id, 'region_1', foo='R1') - switch_id = self.make_network_device( - project_id, cloud_id, region_id, - 'switch1.example.com', IPAddress('10.1.2.101'), 'switch', - zoo='S1', bar='S2') - self.make_host( - project_id, cloud_id, region_id, - 'www.example.xyz', IPAddress(u'10.1.2.101'), 'server', - parent_id=switch_id, - key1="value1", key2="value2") - self.make_host( - project_id, cloud_id, region_id, - 'www2.example.xyz', IPAddress(u'10.1.2.102'), 'server', - parent_id=switch_id, - key1="value-will-not-match", key2="value2") - - filters = { - "region_id": 1, - "vars": "key1:value1,zoo:S1,foo:R1", - "resolved-values": True, - } - res, _ = dbapi.hosts_get_all( - self.context, filters, default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0].name, 'www.example.xyz') diff --git a/craton/tests/unit/db/test_networks.py b/craton/tests/unit/db/test_networks.py deleted file mode 100644 index 95fabcc..0000000 --- a/craton/tests/unit/db/test_networks.py +++ /dev/null @@ -1,522 +0,0 @@ -import uuid - -from craton import exceptions -from craton.db import api as dbapi -from craton.tests.unit.db import base - - -default_pagination = {'limit': 30, 'marker': None} - -project_id1 = uuid.uuid4().hex -cloud_id1 = uuid.uuid4().hex -network1 = {"name": "test network", - "cidr": "192.168.1.0/24", - "gateway": "192.168.1.1", - "netmask": "255.255.255.0", - "region_id": 1, - "project_id": project_id1, - "cloud_id": cloud_id1} - -network2 = {"name": "test network2", - "cidr": "192.168.1.0/24", - "gateway": "192.168.1.1", - "netmask": "255.255.255.0", - "region_id": 2, - "project_id": project_id1, - "cloud_id": cloud_id1} - -device1 = {"hostname": "switch1", - "model_name": "Model-X", - "region_id": 1, - "project_id": 
project_id1, - "device_type": "switch", - "ip_address": "192.168.1.1", - "cloud_id": cloud_id1} - -device2 = {"hostname": "switch2", - "model_name": "Model-X", - "region_id": 2, - "project_id": project_id1, - "device_type": "switch", - "ip_address": "192.168.1.1", - "cloud_id": cloud_id1} - -device3 = {"hostname": "foo1", - "model_name": "Model-Bar", - "region_id": 1, - "project_id": project_id1, - "device_type": "foo", - "ip_address": "192.168.1.2", - "cloud_id": cloud_id1} - -network_interface1 = {"device_id": 1, - "project_id": project_id1, - "name": "eth1", - "ip_address": "192.168.0.2", - "interface_type": "ethernet"} - -network_interface2 = {"device_id": 2, - "project_id": project_id1, - "name": "eth1", - "ip_address": "192.168.0.3", - "interface_type": "ethernet"} - - -class NetworksDBTestCase(base.DBTestCase): - - def test_networks_create(self): - try: - dbapi.networks_create(self.context, network1) - except Exception: - self.fail("Networks create raised unexpected exception") - - def test_network_create_duplicate_name_raises(self): - dbapi.networks_create(self.context, network1) - self.assertRaises(exceptions.DuplicateNetwork, dbapi.networks_create, - self.context, network1) - - def test_networks_get_all(self): - dbapi.networks_create(self.context, network1) - dbapi.networks_create(self.context, network2) - filters = {} - res, _ = dbapi.networks_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 2) - - def test_networks_get_all_filter_region(self): - dbapi.networks_create(self.context, network1) - dbapi.networks_create(self.context, network2) - filters = { - 'region_id': network1['region_id'], - } - res, _ = dbapi.networks_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['name'], 'test network') - - def test_networks_get_by_id(self): - network = dbapi.networks_create(self.context, network1) - res = dbapi.networks_get_by_id(self.context, network.id) - 
self.assertEqual(res.name, 'test network') - - def test_networks_get_by_name_filter_no_exit(self): - dbapi.networks_create(self.context, network1) - filters = {"name": "foo", "region_id": network1['region_id']} - res, _ = dbapi.networks_get_all(self.context, filters, - default_pagination) - self.assertEqual(res, []) - - def test_network_update(self): - network = dbapi.networks_create(self.context, network1) - res = dbapi.networks_get_by_id(self.context, network.id) - self.assertEqual(res.name, 'test network') - new_name = 'test_network1' - res = dbapi.networks_update(self.context, res.id, - {'name': 'test_network1'}) - self.assertEqual(res.name, new_name) - - def test_networks_get_by_id_no_exist_raises(self): - # Since no network is created, any id should raise - self.assertRaises(exceptions.NotFound, dbapi.networks_get_by_id, - self.context, 4) - - def test_networks_delete(self): - network = dbapi.networks_create(self.context, network1) - # First make sure we have the network created - res = dbapi.networks_get_by_id(self.context, network.id) - self.assertEqual(res.id, network.id) - # Delete the network - dbapi.networks_delete(self.context, res.id) - self.assertRaises(exceptions.NotFound, dbapi.networks_get_by_id, - self.context, res.id) - - -class NetworkDevicesDBTestCase(base.DBTestCase): - - def test_network_devices_create(self): - try: - dbapi.network_devices_create(self.context, device1) - except Exception: - self.fail("Network device create raised unexpected exception") - - def test_network_devices_get_all(self): - dbapi.network_devices_create(self.context, device1) - dbapi.network_devices_create(self.context, device2) - filters = {} - res, _ = dbapi.network_devices_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 2) - - def test_network_device_get_all_filter_region(self): - dbapi.network_devices_create(self.context, device1) - dbapi.network_devices_create(self.context, device2) - filters = { - 'region_id': 
device1['region_id'], - } - res, _ = dbapi.network_devices_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['hostname'], 'switch1') - - def test_network_device_get_all_filter_name(self): - dbapi.network_devices_create(self.context, device1) - dbapi.network_devices_create(self.context, device2) - - name = device1['hostname'] - setup_res, _ = dbapi.network_devices_get_all(self.context, {}, - default_pagination) - - self.assertEqual(len(setup_res), 2) - matches = [dev for dev in setup_res if dev['hostname'] == name] - self.assertEqual(len(matches), 1) - - filters = { - 'name': name, - } - res, _ = dbapi.network_devices_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['hostname'], name) - - def test_network_device_get_all_filter_cell_id(self): - region_id = 1 - cell1 = dbapi.cells_create( - self.context, - { - 'name': 'cell1', - 'project_id': project_id1, - 'cloud_id': cloud_id1, - 'region_id': region_id, - } - ) - cell2 = dbapi.cells_create( - self.context, - { - 'name': 'cell2', - 'project_id': project_id1, - 'cloud_id': cloud_id1, - 'region_id': region_id, - } - ) - dbapi.network_devices_create( - self.context, dict(cell_id=cell1.id, **device1) - ) - dbapi.network_devices_create( - self.context, dict(cell_id=cell2.id, **device2) - ) - - setup_res, _ = dbapi.network_devices_get_all(self.context, {}, - default_pagination) - - self.assertEqual(len(setup_res), 2) - matches = [dev for dev in setup_res if dev['cell_id'] == cell1.id] - self.assertEqual(len(matches), 1) - - filters = { - 'cell_id': cell1.id, - } - res, _ = dbapi.network_devices_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['cell_id'], cell1.id) - - def test_network_device_get_all_filter_device_type(self): - dbapi.network_devices_create(self.context, device1) - dbapi.network_devices_create(self.context, device3) - - 
dev_type = device1['device_type'] - setup_res, _ = dbapi.network_devices_get_all(self.context, {}, - default_pagination) - - self.assertEqual(len(setup_res), 2) - matches = [dev for dev in setup_res if dev['device_type'] == dev_type] - self.assertEqual(len(matches), 1) - - filters = { - 'device_type': dev_type, - } - res, _ = dbapi.network_devices_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['device_type'], dev_type) - - def test_network_device_get_all_filter_id(self): - dbapi.network_devices_create(self.context, device1) - dbapi.network_devices_create(self.context, device2) - - setup_res, _ = dbapi.network_devices_get_all(self.context, {}, - default_pagination) - - self.assertEqual(len(setup_res), 2) - - dev_id = setup_res[0]['id'] - self.assertNotEqual(dev_id, setup_res[1]['id']) - - filters = { - 'id': dev_id - } - res, _ = dbapi.network_devices_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['id'], dev_id) - - def test_network_device_get_all_filter_ip_address(self): - dbapi.network_devices_create(self.context, device1) - dbapi.network_devices_create(self.context, device3) - - ip = device1['ip_address'] - setup_res, _ = dbapi.network_devices_get_all(self.context, {}, - default_pagination) - - self.assertEqual(len(setup_res), 2) - matches = [dev for dev in setup_res if str(dev['ip_address']) == ip] - self.assertEqual(len(matches), 1) - - filters = { - 'ip_address': ip, - } - res, _ = dbapi.network_devices_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(str(res[0]['ip_address']), ip) - - def test_network_devices_get_by_id(self): - device = dbapi.network_devices_create(self.context, device1) - res = dbapi.network_devices_get_by_id(self.context, device.id) - self.assertEqual(res.hostname, 'switch1') - - def test_network_devices_get_by_filter_no_exit(self): - 
dbapi.network_devices_create(self.context, device1) - filters = {"hostname": "foo"} - res, _ = dbapi.networks_get_all(self.context, filters, - default_pagination) - self.assertEqual(res, []) - - def test_network_devices_delete(self): - device = dbapi.network_devices_create(self.context, device1) - # First make sure we have the device - res = dbapi.network_devices_get_by_id(self.context, device.id) - self.assertEqual(res.id, device.id) - # Delete the device - dbapi.network_devices_delete(self.context, res.id) - self.assertRaises(exceptions.NotFound, dbapi.network_devices_get_by_id, - self.context, res.id) - - def test_network_devices_labels_create(self): - device = dbapi.network_devices_create(self.context, device1) - labels = {"labels": ["tom", "jerry"]} - dbapi.network_devices_labels_update(self.context, device.id, labels) - - def test_network_devices_update(self): - device = dbapi.network_devices_create(self.context, device1) - res = dbapi.network_devices_get_by_id(self.context, device.id) - self.assertEqual(res.hostname, 'switch1') - new_name = 'switch2' - res = dbapi.network_devices_update(self.context, res.id, - {'name': 'switch2'}) - self.assertEqual(res.name, new_name) - - def test_network_devices_labels_delete(self): - device = dbapi.network_devices_create(self.context, device1) - _labels = {"labels": ["tom", "jerry"]} - dbapi.network_devices_labels_update(self.context, device.id, _labels) - ndevice = dbapi.network_devices_get_by_id(self.context, device.id) - self.assertEqual(sorted(ndevice.labels), sorted(_labels["labels"])) - _dlabels = {"labels": ["tom"]} - dbapi.network_devices_labels_delete(self.context, ndevice.id, _dlabels) - ndevice = dbapi.network_devices_get_by_id(self.context, ndevice.id) - self.assertEqual(ndevice.labels, {"jerry"}) - - def test_network_devices_create_sets_parent_id(self): - parent = dbapi.network_devices_create( - self.context, - { - 'project_id': project_id1, - 'cloud_id': cloud_id1, - 'region_id': 1, - 'name': 
'1.www.example.com', - 'ip_address': '10.1.2.102', - 'device_type': 'switch', - } - ) - child = dbapi.network_devices_create( - self.context, - { - 'project_id': project_id1, - 'cloud_id': cloud_id1, - 'region_id': 1, - 'name': '2.www.example.com', - 'ip_address': '10.1.2.102', - 'device_type': 'switch', - 'parent_id': parent.id, - } - ) - self.assertEqual(parent.id, child.parent_id) - - def test_network_devices_update_sets_parent_id(self): - parent = dbapi.network_devices_create( - self.context, - { - 'project_id': project_id1, - 'cloud_id': cloud_id1, - 'region_id': 1, - 'name': '1.www.example.com', - 'ip_address': '10.1.2.102', - 'device_type': 'switch', - } - ) - child = dbapi.network_devices_create( - self.context, - { - 'project_id': project_id1, - 'cloud_id': cloud_id1, - 'region_id': 1, - 'name': '2.www.example.com', - 'ip_address': '10.1.2.102', - 'device_type': 'switch', - 'parent_id': None, - } - ) - self.assertIsNone(child.parent_id) - child_update = dbapi.network_devices_update( - self.context, - child.id, - { - 'parent_id': parent.id, - } - ) - self.assertEqual(parent.id, child_update.parent_id) - - def test_network_devices_update_fails_when_parent_id_set_to_own_id(self): - network_device1 = dbapi.network_devices_create( - self.context, - { - 'project_id': project_id1, - 'cloud_id': cloud_id1, - 'region_id': 1, - 'name': '1.www.example.com', - 'ip_address': '10.1.2.101', - 'device_type': 'switch', - 'parent_id': None, - } - ) - self.assertRaises( - exceptions.BadRequest, - dbapi.network_devices_update, - self.context, - network_device1.id, - { - 'parent_id': network_device1.id, - } - ) - - def test_network_devices_update_fails_when_parent_set_to_descendant(self): - parent = dbapi.network_devices_create( - self.context, - { - 'project_id': project_id1, - 'cloud_id': cloud_id1, - 'region_id': 1, - 'name': '1.www.example.com', - 'ip_address': '10.1.2.101', - 'device_type': 'switch', - 'parent_id': None, - } - ) - child = dbapi.network_devices_create( - 
self.context, - { - 'project_id': project_id1, - 'cloud_id': cloud_id1, - 'region_id': 1, - 'name': '2.www.example.com', - 'ip_address': '10.1.2.102', - 'device_type': 'switch', - 'parent_id': parent.id, - } - ) - grandchild = dbapi.network_devices_create( - self.context, - { - 'project_id': project_id1, - 'cloud_id': cloud_id1, - 'region_id': 1, - 'name': '3.www.example.com', - 'ip_address': '10.1.2.103', - 'device_type': 'switch', - 'parent_id': child.id, - } - ) - self.assertRaises( - exceptions.BadRequest, - dbapi.network_devices_update, - self.context, - parent.id, - { - 'parent_id': grandchild.id, - } - ) - - -class NetworkInterfacesDBTestCase(base.DBTestCase): - - def test_network_interfaces_create(self): - try: - dbapi.network_interfaces_create(self.context, network_interface1) - except Exception: - self.fail("Network interface create raised unexpected exception") - - def test_network_interfaces_get_all(self): - dbapi.network_interfaces_create(self.context, network_interface1) - dbapi.network_interfaces_create(self.context, network_interface2) - filters = {} - res, _ = dbapi.network_interfaces_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 2) - self.assertEqual( - str(res[0]['ip_address']), network_interface1['ip_address'] - ) - self.assertEqual( - str(res[1]['ip_address']), network_interface2['ip_address'] - ) - - def test_interface_get_all_filter_device_id(self): - dbapi.network_interfaces_create(self.context, network_interface1) - dbapi.network_interfaces_create(self.context, network_interface2) - filters = { - "device_id": 1, - } - res, _ = dbapi.network_interfaces_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['name'], 'eth1') - - def test_network_interfaces_get_by_id(self): - interface = dbapi.network_interfaces_create(self.context, - network_interface1) - res = dbapi.network_interfaces_get_by_id(self.context, interface.id) - self.assertEqual(res.name, 
'eth1') - self.assertEqual(str(res.ip_address), network_interface1['ip_address']) - - def test_network_interfaces_update(self): - interface = dbapi.network_interfaces_create(self.context, - network_interface1) - res = dbapi.network_interfaces_get_by_id(self.context, interface.id) - self.assertEqual(res.name, 'eth1') - new_name = 'eth2' - res = dbapi.network_interfaces_update(self.context, interface.id, - {'name': 'eth2'}) - self.assertEqual(res.name, new_name) - self.assertEqual(str(res.ip_address), network_interface1['ip_address']) - - def test_network_interfaces_delete(self): - interface = dbapi.network_interfaces_create(self.context, - network_interface1) - # First make sure we have the interface created - res = dbapi.network_interfaces_get_by_id(self.context, interface.id) - self.assertEqual(res.id, interface.id) - # Delete the device - dbapi.network_interfaces_delete(self.context, res.id) - self.assertRaises(exceptions.NotFound, - dbapi.network_interfaces_get_by_id, - self.context, res.id) diff --git a/craton/tests/unit/db/test_projects.py b/craton/tests/unit/db/test_projects.py deleted file mode 100644 index 85b8a2b..0000000 --- a/craton/tests/unit/db/test_projects.py +++ /dev/null @@ -1,99 +0,0 @@ -import copy -import uuid - -from craton import exceptions -from craton.db import api as dbapi -from craton.tests.unit.db import base - -default_pagination = {'limit': 30, 'marker': None} - -project1 = {'name': 'project1'} -project2 = {'name': 'project2'} - - -class ProjectsDBTestCase(base.DBTestCase): - - def test_create_project(self): - # Set root, as only admin project can create other projects - project = dbapi.projects_create(self.context, project1) - self.assertEqual(project['name'], project1['name']) - - def test_create_project_no_root_fails(self): - context = copy.deepcopy(self.context) - context.is_admin_project = False - self.assertRaises(exceptions.AdminRequired, - dbapi.projects_create, - context, - project1) - - def test_project_get_all(self): - 
dbapi.projects_create(self.context, project1) - dbapi.projects_create(self.context, project2) - - res, _ = dbapi.projects_get_all(self.context, {}, default_pagination) - self.assertEqual(len(res), 2) - - def test_project_get_no_admin_project_raises(self): - self.context.is_admin_project = True - dbapi.projects_create(self.context, project1) - dbapi.projects_create(self.context, project2) - - # Now set admin_project = false to become normal project user - self.context.is_admin_project = False - self.assertRaises(exceptions.AdminRequired, - dbapi.projects_get_all, - self.context, - {}, default_pagination) - - def test_project_get_by_name(self): - dbapi.projects_create(self.context, project1) - dbapi.projects_create(self.context, project2) - - res, _ = dbapi.projects_get_by_name(self.context, project1['name'], {}, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0].name, project1['name']) - - def test_project_get_by_id(self): - project = dbapi.projects_create(self.context, project1) - res = dbapi.projects_get_by_id(self.context, project['id']) - self.assertEqual(str(res['id']), str(project['id'])) - - def test_project_create_id_uuid_type(self): - project = dbapi.projects_create(self.context, project1) - self.assertEqual(type(project['id']), uuid.UUID) - - def test_project_get_id_uuid_type(self): - project = dbapi.projects_create(self.context, project1) - res = dbapi.projects_get_by_id(self.context, project['id']) - self.assertEqual(type(res['id']), uuid.UUID) - - def test_project_variables_update_does_update_variables(self): - create_res = dbapi.projects_create(self.context, project1) - res = dbapi.projects_get_by_id(self.context, create_res.id) - self.assertEqual(res.variables, {}) - variables = {"key1": "value1", "key2": "value2"} - res = dbapi.variables_update_by_resource_id( - self.context, "projects", res.id, variables - ) - self.assertEqual(res.variables, variables) - new_variables = {"key1": "tom", "key2": "cat"} - res = 
dbapi.variables_update_by_resource_id( - self.context, "projects", res.id, new_variables - ) - self.assertEqual(res.variables, new_variables) - - def test_project_variables_delete(self): - create_res = dbapi.projects_create(self.context, project1) - res = dbapi.projects_get_by_id(self.context, create_res.id) - self.assertEqual(res.variables, {}) - variables = {"key1": "value1", "key2": "value2"} - res = dbapi.variables_update_by_resource_id( - self.context, "projects", res.id, variables - ) - self.assertEqual(res.variables, variables) - # NOTE(sulo): we delete variables by their key - res = dbapi.variables_delete_by_resource_id( - self.context, "projects", res.id, {"key1": "key1"} - ) - self.assertEqual(res.variables, {"key2": "value2"}) diff --git a/craton/tests/unit/db/test_regions.py b/craton/tests/unit/db/test_regions.py deleted file mode 100644 index 6fd5c2b..0000000 --- a/craton/tests/unit/db/test_regions.py +++ /dev/null @@ -1,98 +0,0 @@ -import uuid - -from craton.db import api as dbapi -from craton.tests.unit.db import base -from craton import exceptions - -default_pagination = {'limit': 30, 'marker': None} - -project_id1 = uuid.uuid4().hex -cloud_id1 = uuid.uuid4().hex -region1 = {'project_id': project_id1, 'cloud_id': cloud_id1, 'name': 'region1'} - - -class RegionsDBTestCase(base.DBTestCase): - - def test_region_create(self): - try: - dbapi.regions_create(self.context, region1) - except Exception: - self.fail("Region create raised unexpected exception") - - def test_region_create_duplicate_name_raises(self): - dbapi.regions_create(self.context, region1) - self.assertRaises(exceptions.DuplicateRegion, dbapi.regions_create, - self.context, region1) - - def test_regions_get_all(self): - dbapi.regions_create(self.context, region1) - filters = {} - res, _ = dbapi.regions_get_all(self.context, filters, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['name'], 'region1') - - def test_regions_get_all_with_var_filters(self): - res 
= dbapi.regions_create(self.context, region1) - variables = {"key1": "value1", "key2": "value2"} - dbapi.variables_update_by_resource_id( - self.context, "regions", res.id, variables - ) - filters = {} - filters["vars"] = "key1:value1" - regions, _ = dbapi.regions_get_all( - self.context, filters, default_pagination, - ) - self.assertEqual(len(regions), 1) - self.assertEqual(regions[0].name, region1['name']) - - def test_regions_get_all_with_var_filters_noexist(self): - res = dbapi.regions_create(self.context, region1) - variables = {"key1": "value1", "key2": "value2"} - dbapi.variables_update_by_resource_id( - self.context, "regions", res.id, variables - ) - filters = {} - filters["vars"] = "key1:value12" - regions, _ = dbapi.regions_get_all( - self.context, filters, default_pagination, - ) - self.assertEqual(len(regions), 0) - - def test_region_get_by_name(self): - dbapi.regions_create(self.context, region1) - res = dbapi.regions_get_by_name(self.context, region1['name']) - self.assertEqual(res.name, 'region1') - - def test_region_get_by_id(self): - dbapi.regions_create(self.context, region1) - res = dbapi.regions_get_by_id(self.context, 1) - self.assertEqual(res.name, 'region1') - - def test_region_get_by_name_no_exit_raises(self): - # TODO(sulo): fix sqlalchemy api first - pass - - def test_region_get_by_id_no_exist_raises(self): - # TODO(sulo): fix sqlalchemy api first - pass - - def test_region_update(self): - dbapi.regions_create(self.context, region1) - res = dbapi.regions_get_by_id(self.context, 1) - self.assertEqual(res.name, 'region1') - new_name = "region_New1" - res = dbapi.regions_update(self.context, res.id, - {'name': 'region_New1'}) - self.assertEqual(res.name, new_name) - - def test_region_delete(self): - dbapi.regions_create(self.context, region1) - # First make sure we have the region - res = dbapi.regions_get_by_name(self.context, region1['name']) - self.assertEqual(res.name, 'region1') - - dbapi.regions_delete(self.context, res.id) - 
self.assertRaises(exceptions.NotFound, - dbapi.regions_get_by_name, - self.context, 'fake-region') diff --git a/craton/tests/unit/db/test_users.py b/craton/tests/unit/db/test_users.py deleted file mode 100644 index dbb67e3..0000000 --- a/craton/tests/unit/db/test_users.py +++ /dev/null @@ -1,74 +0,0 @@ -import uuid - -from craton import exceptions -from craton.db import api as dbapi -from craton.tests.unit.db import base - -default_pagination = {'limit': 30, 'marker': None} - -project_id1 = uuid.uuid4().hex -project_id2 = uuid.uuid4().hex -root = {'project_id': project_id1, 'username': 'root', "is_admin": True, - "is_root": True} -user1 = {'project_id': project_id2, 'username': 'user1', "is_admin": True} -user2 = {'project_id': project_id2, 'username': 'user2', "is_admin": False} - - -class UsersDBTestCase(base.DBTestCase): - - def make_user(self, user, is_admin=True, is_root=False): - # Set admin context first - self.context.is_admin = is_admin - self.context.is_admin_project = is_root - user = dbapi.users_create(self.context, user) - return user - - def test_user_create(self): - user = self.make_user(user1) - self.assertEqual(user['username'], 'user1') - - def test_user_create_no_admin_context_fails(self): - self.assertRaises(exceptions.AdminRequired, - self.make_user, - user1, - is_admin=False) - - def test_users_get_all(self): - # Ensure context tenant is the same one as the - # one that will make request, test context has - # fake-tenant set by default. - self.context.tenant = user1['project_id'] - dbapi.users_create(self.context, user1) - dbapi.users_create(self.context, user2) - res = dbapi.users_get_all(self.context, {}, default_pagination) - self.assertEqual(len(res), 2) - - def test_user_get_all_no_project_context(self): - # Ensure when request has no root context and the request - # is not for the same project no user info is given back. 
- self.make_user(user1) - self.context.tenant = uuid.uuid4().hex - res, _ = dbapi.users_get_all(self.context, {}, default_pagination) - self.assertEqual(len(res), 0) - - def test_user_get_no_admin_context_raises(self): - self.make_user(user1) - self.context.is_admin = False - self.assertRaises(exceptions.AdminRequired, - dbapi.users_get_all, - self.context, - {}, default_pagination) - - def test_user_get_by_name(self): - dbapi.users_create(self.context, user1) - dbapi.users_create(self.context, user2) - self.context.tenant = user1['project_id'] - res, _ = dbapi.users_get_by_name(self.context, user1['username'], {}, - default_pagination) - self.assertEqual(len(res), 1) - self.assertEqual(res[0]['username'], user1['username']) - - def test_user_get_by_id(self): - user = self.make_user(user1) - res = dbapi.users_get_by_id(self.context, user["id"]) - self.assertEqual(res["username"], user["username"]) diff --git a/craton/tests/unit/db/test_variables.py b/craton/tests/unit/db/test_variables.py deleted file mode 100644 index a733dd3..0000000 --- a/craton/tests/unit/db/test_variables.py +++ /dev/null @@ -1,473 +0,0 @@ -from copy import deepcopy - -from craton import exceptions -from craton.db import api as dbapi -from craton.tests.unit.db import base - - -class VariablesDBTestCase: - - def _get_mock_resource_id(self): - # NOTE(thomasem): Project IDs are UUIDs not integers - if self.resources_type in ("projects",): - return "5a4e32e1-8571-4c2c-a088-a11f98900355" - return 1 - - def create_project(self, name, variables=None): - project = dbapi.projects_create( - self.context, - { - "name": name, - "variables": variables or {}, - }, - ) - return project.id - - def create_cloud(self, name, project_id, variables=None): - cloud = dbapi.clouds_create( - self.context, - { - 'name': name, - 'project_id': project_id, - 'variables': variables or {}, - }, - ) - return cloud.id - - def create_region(self, name, project_id, cloud_id, variables=None): - region = dbapi.regions_create( - 
self.context, - { - 'name': name, - 'project_id': project_id, - 'cloud_id': cloud_id, - 'variables': variables or {}, - }, - ) - return region.id - - def create_cell(self, name, project_id, cloud_id, region_id, - variables=None): - cell = dbapi.cells_create( - self.context, - { - 'name': name, - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'variables': variables or {} - }, - ) - return cell.id - - def create_host( - self, name, project_id, cloud_id, region_id, ip_address, host_type, - cell_id=None, parent_id=None, labels=None, variables=None, - ): - host = { - 'name': name, - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'cell_id': cell_id, - 'ip_address': ip_address, - 'parent_id': parent_id, - 'device_type': host_type, - 'active': True, - 'labels': labels or (), - 'variables': variables or {}, - } - - host = dbapi.hosts_create(self.context, host) - self.assertEqual(variables, host.variables) - - return host.id - - def create_network( - self, name, project_id, cloud_id, region_id, cidr, gateway, - netmask, cell_id=None, variables=None, - ): - network = { - 'name': name, - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'cell_id': cell_id, - 'cidr': cidr, - 'gateway': gateway, - 'netmask': netmask, - 'variables': variables or {}, - } - - network = dbapi.networks_create(self.context, network) - self.assertEqual(variables, network.variables) - - return network.id - - def create_network_device( - self, name, project_id, cloud_id, region_id, ip_address, - network_device_type, cell_id=None, parent_id=None, labels=None, - variables=None, - ): - network_device = { - 'name': name, - 'project_id': project_id, - 'cloud_id': cloud_id, - 'region_id': region_id, - 'cell_id': cell_id, - 'ip_address': ip_address, - 'parent_id': parent_id, - 'device_type': network_device_type, - 'active': True, - 'labels': labels or (), - 'variables': variables or {}, - } - - network_device = 
dbapi.network_devices_create( - self.context, network_device - ) - self.assertEqual(variables, network_device.variables) - - return network_device.id - - def setup_host(self, variables): - project_id = self.create_project(name='project1') - cloud_id = self.create_cloud(name='cloud1', project_id=project_id) - region_id = self.create_region( - name='region1', - project_id=project_id, - cloud_id=cloud_id, - ) - cell_id = self.create_cell( - name="cell1", - project_id=project_id, - cloud_id=cloud_id, - region_id=region_id, - ) - host_id = self.create_host( - name="host1", - project_id=project_id, - cloud_id=cloud_id, - region_id=region_id, - ip_address="192.168.2.1", - host_type="server", - cell_id=cell_id, - parent_id=None, - labels=None, - variables=variables, - ) - - return host_id - - def setup_network_device(self, variables): - project_id = self.create_project(name='project1') - cloud_id = self.create_cloud(name='cloud1', project_id=project_id) - region_id = self.create_region( - name='region1', - project_id=project_id, - cloud_id=cloud_id, - ) - cell_id = self.create_cell( - name="cell1", - project_id=project_id, - cloud_id=cloud_id, - region_id=region_id - ) - network_device_id = self.create_network_device( - name="network_device1", - project_id=project_id, - cloud_id=cloud_id, - region_id=region_id, - ip_address="192.168.2.1", - network_device_type="switch", - cell_id=cell_id, - parent_id=None, - labels=None, - variables=variables, - ) - - return network_device_id - - def setup_network(self, variables): - project_id = self.create_project(name='project1') - cloud_id = self.create_cloud(name='cloud1', project_id=project_id) - region_id = self.create_region( - name='region1', - project_id=project_id, - cloud_id=cloud_id, - ) - cell_id = self.create_cell( - name="cell1", - project_id=project_id, - cloud_id=cloud_id, - region_id=region_id, - ) - network_id = self.create_network( - name="network1", - project_id=project_id, - cloud_id=cloud_id, - region_id=region_id, 
- cell_id=cell_id, - cidr="192.168.2.0/24", - gateway="192.168.2.1", - netmask="255.255.255.0", - variables=variables, - ) - - return network_id - - def setup_cell(self, variables): - project_id = self.create_project(name='project1') - cloud_id = self.create_cloud(name='cloud1', project_id=project_id) - region_id = self.create_region( - name='region1', - project_id=project_id, - cloud_id=cloud_id, - ) - cell_id = self.create_cell( - name="cell1", - project_id=project_id, - cloud_id=cloud_id, - region_id=region_id, - variables=variables, - ) - - return cell_id - - def setup_region(self, variables): - project_id = self.create_project(name='project1') - cloud_id = self.create_cloud(name='cloud1', project_id=project_id) - region_id = self.create_region( - name='region1', - project_id=project_id, - cloud_id=cloud_id, - variables=variables, - ) - - return region_id - - def setup_cloud(self, variables): - project_id = self.create_project(name='project1') - cloud_id = self.create_cloud( - name='cloud1', - project_id=project_id, - variables=variables, - ) - - return cloud_id - - def setup_project(self, variables): - project_id = self.create_project(name='project1', variables=variables) - return project_id - - def setup_resource(self, *args, **kwargs): - setup_fn = { - "cells": self.setup_cell, - "hosts": self.setup_host, - "networks": self.setup_network, - "network-devices": self.setup_network_device, - "regions": self.setup_region, - "clouds": self.setup_cloud, - "projects": self.setup_project, - } - - return setup_fn[self.resources_type](*args, *kwargs) - - def test_get_resource_by_id_with_variables(self): - variables = { - "key1": "value1", - "key2": "value2", - "key3": "value3", - } - - resource_id = self.setup_resource(deepcopy(variables)) - - test = dbapi.resource_get_by_id( - self.context, self.resources_type, resource_id - ) - - self.assertEqual(resource_id, test.id) - self.assertEqual(variables, test.variables) - - def test_get_resource_by_id_not_found(self): - - 
self.assertRaises( - exceptions.NotFound, - dbapi.resource_get_by_id, - context=self.context, - resources=self.resources_type, - resource_id=self._get_mock_resource_id(), - ) - - def test_variables_update_by_resource_id_existing_empty(self): - existing_variables = {} - - resource_id = self.setup_resource(existing_variables) - - variables = { - "key1": "value1", - "key2": "value2", - "key3": "value3", - } - - test = dbapi.variables_update_by_resource_id( - self.context, self.resources_type, resource_id, deepcopy(variables) - ) - - self.assertEqual(resource_id, test.id) - self.assertEqual(variables, test.variables) - - validate = dbapi.resource_get_by_id( - self.context, self.resources_type, resource_id - ) - - self.assertEqual(resource_id, validate.id) - self.assertEqual(variables, validate.variables) - - def test_variables_update_by_resource_id_not_found(self): - self.assertRaises( - exceptions.NotFound, - dbapi.variables_update_by_resource_id, - context=self.context, - resources=self.resources_type, - resource_id=self._get_mock_resource_id(), - data={"key1": "value1"}, - ) - - def test_variables_update_by_resource_id_modify_existing(self): - existing_variables = { - "key1": "value1", - "key2": "value2", - "key3": "value3", - } - - update_variables = { - "key3": "newvalue3", - "key4": "value4", - } - - result_variables = deepcopy(existing_variables) - result_variables.update(deepcopy(update_variables)) - - resource_id = self.setup_resource(existing_variables) - - test = dbapi.variables_update_by_resource_id( - context=self.context, - resources=self.resources_type, - resource_id=resource_id, - data=deepcopy(update_variables) - ) - - self.assertEqual(resource_id, test.id) - self.assertEqual(result_variables, test.variables) - - validate = dbapi.resource_get_by_id( - self.context, self.resources_type, resource_id - ) - - self.assertEqual(resource_id, validate.id) - self.assertEqual(result_variables, validate.variables) - - def 
test_variables_delete_by_resource_id(self): - existing_variables = { - "key1": "value1", - "key2": "value2", - "key3": "value3", - } - - delete_variables = [ - "key2", - "key3", - ] - - result_variables = {"key1": "value1"} - - resource_id = self.setup_resource(existing_variables) - - test = dbapi.variables_delete_by_resource_id( - context=self.context, - resources=self.resources_type, - resource_id=resource_id, - data=delete_variables - ) - - self.assertEqual(resource_id, test.id) - self.assertEqual(result_variables, test.variables) - - validate = dbapi.resource_get_by_id( - self.context, self.resources_type, resource_id - ) - - self.assertEqual(resource_id, validate.id) - self.assertEqual(result_variables, validate.variables) - - def test_variables_delete_by_resource_id_resource_not_found(self): - - self.assertRaises( - exceptions.NotFound, - dbapi.variables_delete_by_resource_id, - context=self.context, - resources=self.resources_type, - resource_id=self._get_mock_resource_id(), - data={"key1": "value1"}, - ) - - def test_variables_delete_by_resource_id_variable_not_found(self): - existing_variables = { - "key1": "value1", - "key2": "value2", - "key3": "value3", - } - - delete_variables = [ - "key4", - ] - - result_variables = deepcopy(existing_variables) - - resource_id = self.setup_resource(existing_variables) - - test = dbapi.variables_delete_by_resource_id( - context=self.context, - resources=self.resources_type, - resource_id=resource_id, - data=delete_variables - ) - - self.assertEqual(resource_id, test.id) - self.assertEqual(result_variables, test.variables) - - validate = dbapi.resource_get_by_id( - self.context, self.resources_type, resource_id - ) - - self.assertEqual(resource_id, validate.id) - self.assertEqual(result_variables, validate.variables) - - -class HostsVariablesDBTestCase(VariablesDBTestCase, base.DBTestCase): - resources_type = "hosts" - - -class NetworkDevicesVariablesDBTestCase(VariablesDBTestCase, base.DBTestCase): - resources_type = 
"network-devices" - - -class CellsVariablesDBTestCase(VariablesDBTestCase, base.DBTestCase): - resources_type = "cells" - - -class RegionsVariablesDBTestCase(VariablesDBTestCase, base.DBTestCase): - resources_type = "regions" - - -class NetworksVariablesDBTestCase(VariablesDBTestCase, base.DBTestCase): - resources_type = "networks" - - -class ProjectsVariablesDBTestCase(VariablesDBTestCase, base.DBTestCase): - resources_type = "projects" - - -class CloudsVariablesDBTestCase(VariablesDBTestCase, base.DBTestCase): - resources_type = "clouds" diff --git a/craton/tests/unit/fake_resources.py b/craton/tests/unit/fake_resources.py deleted file mode 100644 index 53dd4dd..0000000 --- a/craton/tests/unit/fake_resources.py +++ /dev/null @@ -1,232 +0,0 @@ -import copy -import uuid - - -""" -Provides some fake resources - region, cell, host and other related -objects for test. -""" - - -class Project(object): - def __init__(self, id, name, variables): - self.id = uuid.UUID(id) - self.name = name - self.variables = variables - - def items(self): - return iter(self.__dict__.items()) - - -PROJECT1 = Project("4534dcb4-dacd-474f-8afc-8bd5ab2d26e8", - "project1", {"key1": "value1", "key2": "value2"}) -PROJECT2 = Project("77c527cb-837d-4fcb-bafb-af37ba3d13a4", - "project2", {"key1": "value1", "key2": "value2"}) - - -class Cloud(object): - def __init__(self, id, name, project_id, variables, labels=None): - self.id = id - self.name = name - self.project_id = project_id - self.variables = variables - self.labels = labels - - def items(self): - return iter(self.__dict__.items()) - - -CLOUD1 = Cloud(1, "cloud1", "abcd", {"key1": "value1", "key2": "value2"}) -CLOUD2 = Cloud(2, "cloud2", "abcd", {"key3": "value3", "key4": "value4"}) -CLOUDS_LIST = [CLOUD1, CLOUD2] - - -class User(object): - def __init__(self, id, username, project_id, is_admin, is_root, - api_key, roles=None): - self.id = id - self.username = username - self.project_id = project_id - self.is_admin = is_admin - self.is_root 
= is_root - self.api_key = api_key - self.roles = roles - - def items(self): - return iter(self.__dict__.items()) - - -USER1 = User(1, 'user1', "2757a1b4-cd90-4891-886c-a246fd4e7064", True, False, - 'xx-yy-zz') -USER2 = User(2, 'user2', "05d081ca-dcf5-4e96-b132-23b94d665799", False, False, - 'aa-bb-cc') - - -class Cell(object): - def __init__(self, id, name, status, region_id, cloud_id, project_id, - variables, labels=None): - self.id = id - self.name = name - self.status = status - self.region_id = region_id - self.cloud_id = cloud_id - self.project_id = project_id - self.variables = variables - self.resolved = variables - self.labels = labels - - def items(self): - return iter(self.__dict__.items()) - - -CELL1 = Cell(1, "cell1", "active", 1, 1, 1, {"key1": "value1", - "key2": "value2"}) -CELL2 = Cell(2, "cell2", "active", "2", "1", "abcd", {"key3": "value3", - "key4": "value4"}) -CELL3 = Cell(3, "cell1", "active", 2, 1, 1, {"key1": "value1", - "key2": "value2"}) - -CELL_LIST = [CELL1, CELL2] -CELL_LIST2 = [CELL1, CELL3] - - -class Region(object): - def __init__(self, id, name, project_id, cloud_id, variables, labels=None): - self.id = id - self.name = name - self.project_id = project_id - self.cloud_id = cloud_id - self.variables = variables - self.resolved = variables - self.labels = labels - - def items(self): - return iter(self.__dict__.items()) - - -REGION1 = Region(1, "region1", "abcd", 1, {"key1": "value1", "key2": "value2"}) -REGION2 = Region(2, "region2", "abcd", 1, {"key3": "value3", "key4": "value4"}) -REGIONS_LIST = [REGION1, REGION2] - - -class Host(object): - def __init__(self, id, name, project_id, cloud_id, region_id, ip_address, - device_type, variables, labels=None, cell_id=None, - parent_id=None): - self.id = id - self.name = name - self.project_id = project_id - self.cloud_id = cloud_id - self.region_id = region_id - self.ip_address = ip_address - self.variables = variables - self.resolved = copy.copy(variables) - self.device_type = device_type 
- self.labels = labels - self.cell_id = cell_id - self.parent_id = parent_id - - def items(self): - return iter(self.__dict__.items()) - - -HOST1 = Host(1, "www.craton.com", 1, 1, 1, "192.168.1.1", "server", - {"key1": "value1", "key2": "value2"}) -HOST2 = Host(2, "www.example.com", "1", "1", "1", "192.168.1.2", "server", - {"key1": "value1", "key2": "value2"}) -HOST3 = Host(3, "www.example.net", "1", "!", "2", "10.10.0.1", "server", - {"key1": "value1", "key2": "value2"}) -HOST4 = Host(4, "www.example.net", "1", "1", "2", "10.10.0.1", "server", - {"key1": "value1", "key2": "value2"}, labels=["a", "b"]) -HOSTS_LIST_R1 = [HOST1, HOST2] -HOSTS_LIST_R2 = [HOST3] -HOSTS_LIST_R3 = [HOST1, HOST2, HOST3] - - -class Networks(object): - def __init__(self, id, name, project_id, cidr, gateway, netmask, - variables, cloud_id, region_id, labels=None): - self.id = id - self.name = name - self.project_id = project_id - self.cidr = cidr - self.gateway = gateway - self.netmask = netmask - self.variables = variables - self.resolved = copy.copy(variables) - self.labels = labels - self.cloud_id = cloud_id - self.region_id = region_id - - def items(self): - return iter(self.__dict__.items()) - - -NETWORK1 = Networks(1, "PrivateNetwork", 1, "192.168.1.0/24", "192.168.1.1", - "255.255.255.0", {"key1": "value1"}, 1, 1) -NETWORK2 = Networks(2, "PublicNetwork", 1, "10.10.1.0/24", "10.10.1.1", - "255.255.255.0", {"pkey1": "pvalue1"}, 1, 1) -NETWORK3 = Networks(3, "OtherNetwork", 1, "10.10.1.0/24", "10.10.1.2", - "255.255.255.0", {"okey1": "ovalue1"}, 1, 2) -NETWORKS_LIST = [NETWORK1, NETWORK2] -NETWORKS_LIST2 = [NETWORK1, NETWORK2, NETWORK3] - - -class NetworkDevice(): - def __init__(self, id, name, project_id, cloud_id, region_id, device_type, - ip_address, variables, labels=None, cell_id=None, - parent_id=None): - self.name = name - self.id = id - self.project_id = project_id - self.region_id = region_id - self.device_type = device_type - self.ip_address = ip_address - self.variables = 
variables - self.resolved = copy.copy(variables) - self.labels = labels - self.cloud_id = cloud_id - self.cell_id = cell_id - self.parent_id = parent_id - - def items(self): - return iter(self.__dict__.items()) - - -NETWORK_DEVICE1 = NetworkDevice(1, "NetDevices1", 1, 1, 1, "Server", - "10.10.0.1", - {"key1": "value1", "key2": "value2"}, - labels=["a", "b"]) -NETWORK_DEVICE2 = NetworkDevice(2, "NetDevices2", 1, 1, 2, "Server", - "10.10.0.2", - {"key1": "value1", "key2": "value2"}, - labels=["a", "b"]) - -NETWORK_DEVICE_LIST1 = [NETWORK_DEVICE1] -NETWORK_DEVICE_LIST2 = [NETWORK_DEVICE1, NETWORK_DEVICE2] - - -class NetworkInterface(): - def __init__(self, id, name, device_id, project_id, interface_type, - ip_address, variables): - self.id = id - self.name = name - self.device_id = device_id - self.project_id = project_id - self.interface_type = interface_type - self.ip_address = ip_address - self.variables = variables - - def items(self): - return iter(self.__dict__.items()) - - -NETWORK_INTERFACE1 = NetworkInterface(1, "NetInterface", 1, 1, - "interface_type1", "10.10.0.1", - {"key1": "value1", "key2": "value2"}) -NETWORK_INTERFACE2 = NetworkInterface(2, "NetInterface", 2, 1, - "interface_type2", "10.10.0.2", - {"key1": "value1", "key2": "value2"}) - -NETWORK_INTERFACE_LIST1 = [NETWORK_INTERFACE1] -NETWORK_INTERFACE_LIST2 = [NETWORK_INTERFACE1, NETWORK_INTERFACE2] diff --git a/craton/tests/unit/test_api.py b/craton/tests/unit/test_api.py deleted file mode 100644 index d885595..0000000 --- a/craton/tests/unit/test_api.py +++ /dev/null @@ -1,1843 +0,0 @@ -import copy -import mock -import uuid - -from oslo_serialization import jsonutils - -from craton import api -from craton import exceptions -from craton.api import middleware -from craton.db.sqlalchemy import api as dbapi -from craton.tests import TestCase -from craton.tests.unit import fake_resources - -project_id1 = uuid.uuid4().hex - - -class APIV1Test(TestCase): - def setUp(self): - super(APIV1Test, self).setUp() 
- - # Create the app first - self.app = api.setup_app() - # Put the context middleware - self.app.wsgi_app = middleware.NoAuthContextMiddleware( - self.app.wsgi_app) - # Create client - self.client = self.app.test_client() - - def get(self, path, **kw): - resp = self.client.get(path=path) - resp.json = jsonutils.loads(resp.data.decode('utf-8')) - return resp - - def post(self, path, data, **kw): - content = jsonutils.dumps(data) - content_type = 'application/json' - resp = self.client.post(path=path, content_type=content_type, - data=content) - resp.json = jsonutils.loads(resp.data.decode('utf-8')) - return resp - - def put(self, path, data, **kw): - content = jsonutils.dumps(data) - content_type = 'application/json' - resp = self.client.put(path=path, content_type=content_type, - data=content) - resp.json = jsonutils.loads(resp.data.decode('utf-8')) - return resp - - def delete(self, path, data=None): - if data: - content = jsonutils.dumps(data) - content_type = 'application/json' - else: - content = None - content_type = None - resp = self.client.delete( - path=path, content_type=content_type, data=content - ) - return resp - - -class APIV1WithContextTest(TestCase): - def setUp(self): - super(APIV1WithContextTest, self).setUp() - self.app = api.setup_app() - self.app.wsgi_app = middleware.LocalAuthContextMiddleware( - self.app.wsgi_app) - self.client = self.app.test_client() - - def get(self, path, **kw): - resp = self.client.get(path=path, **kw) - resp.json = jsonutils.loads(resp.data.decode('utf-8')) - return resp - - -class APIV1RBACLiteTest(TestCase): - - @staticmethod - def _test_func(context, resources): - pass - - def test_permissions_for_project_create_non_admin_raises(self): - context = mock.Mock() - context.is_admin = False - resources = 'projects' - fn = dbapi.permissions_for(dbapi.CRUD.CREATE)(self._test_func) - self.assertRaises(exceptions.AdminRequired, - fn, - context, - resources) - - def test_permissions_for_project_read_non_admin_raises(self): - 
context = mock.Mock() - context.is_admin = False - resources = 'projects' - fn = dbapi.permissions_for(dbapi.CRUD.READ)(self._test_func) - self.assertRaises(exceptions.AdminRequired, - fn, - context, - resources) - - def test_permissions_for_project_update_non_admin_raises(self): - context = mock.Mock() - context.is_admin = False - resources = 'projects' - fn = dbapi.permissions_for(dbapi.CRUD.UPDATE)(self._test_func) - self.assertRaises(exceptions.AdminRequired, - fn, - context, - resources) - - def test_permissions_for_project_delete_non_admin_raises(self): - context = mock.Mock() - context.is_admin = False - resources = 'projects' - fn = dbapi.permissions_for(dbapi.CRUD.DELETE)(self._test_func) - self.assertRaises(exceptions.AdminRequired, - fn, - context, - resources) - - def test_permissions_for_project_create_admin_works(self): - context = mock.Mock() - context.is_admin = True - resources = 'projects' - fn = dbapi.permissions_for(dbapi.CRUD.CREATE)(self._test_func) - fn(context, resources) - - def test_permissions_for_project_read_admin_works(self): - context = mock.Mock() - context.is_admin = True - resources = 'projects' - fn = dbapi.permissions_for(dbapi.CRUD.READ)(self._test_func) - fn(context, resources) - - def test_permissions_for_project_update_admin_works(self): - context = mock.Mock() - context.is_admin = True - resources = 'projects' - fn = dbapi.permissions_for(dbapi.CRUD.UPDATE)(self._test_func) - fn(context, resources) - - def test_permissions_for_project_delete_admin_works(self): - context = mock.Mock() - context.is_admin = True - resources = 'projects' - fn = dbapi.permissions_for(dbapi.CRUD.DELETE)(self._test_func) - fn(context, resources) - - def test_permissions_for_nonlisted_resources_create_non_admin_works(self): - context = mock.Mock() - context.is_admin = False - resources = 'iamfakeresources' - fn = dbapi.permissions_for(dbapi.CRUD.CREATE)(self._test_func) - fn(context, resources) - - def 
test_permissions_for_nonlisted_resources_read_non_admin_works(self): - context = mock.Mock() - context.is_admin = False - resources = 'iamfakeresources' - fn = dbapi.permissions_for(dbapi.CRUD.READ)(self._test_func) - fn(context, resources) - - def test_permissions_for_nonlisted_resources_update_non_admin_works(self): - context = mock.Mock() - context.is_admin = False - resources = 'iamfakeresources' - fn = dbapi.permissions_for(dbapi.CRUD.UPDATE)(self._test_func) - fn(context, resources) - - def test_permissions_for_nonlisted_resources_delete_non_admin_works(self): - context = mock.Mock() - context.is_admin = False - resources = 'iamfakeresources' - fn = dbapi.permissions_for(dbapi.CRUD.DELETE)(self._test_func) - fn(context, resources) - - -class APIV1MiddlewareTest(APIV1WithContextTest): - def test_no_auth_token_returns_401(self): - resp = self.get('v1/cells/1') - self.assertEqual(401, resp.status_code) - - def test_ensure_non_uuid_token_returns_401(self): - headers = {"X-Auth-Project": "abcd", "X-Auth-Token": "abcd123"} - resp = self.get('v1/cells/1', headers=headers) - self.assertEqual(401, resp.status_code) - - @mock.patch.object(dbapi, 'cells_get_by_id') - @mock.patch.object(dbapi, 'get_user_info') - def test_ensure_valid_uuid_is_processed(self, mock_user, mock_cell): - mock_user.return_value = fake_resources.USER1 - mock_cell.return_value = fake_resources.CELL1 - headers = {"X-Auth-Project": "2757a1b4-cd90-4891-886c-a246fd4e7064", - "X-Auth-Token": "xx-yy-zz"} - resp = self.get('v1/cells/1', headers=headers) - self.assertEqual(200, resp.status_code) - - -class APIV1CellsIDTest(APIV1Test): - @mock.patch.object(dbapi, 'cells_get_by_id') - def test_get_cells_by_id(self, mock_cells): - mock_cells.return_value = fake_resources.CELL1 - resp = self.get('v1/cells/1') - self.assertEqual(resp.json["name"], fake_resources.CELL1.name) - - @mock.patch.object(dbapi, 'cells_get_by_id') - def test_get_cells_by_bad_id_is_404(self, mock_cells): - mock_cells.side_effect = 
exceptions.NotFound() - resp = self.get('v1/cells/3') - self.assertEqual(404, resp.status_code) - - @mock.patch.object(dbapi, 'cells_delete') - def test_cells_delete(self, mock_cell): - resp = self.delete('v1/cells/1') - self.assertEqual(204, resp.status_code) - - @mock.patch.object(dbapi, 'cells_update') - def test_put_cells_by_id(self, mock_cell): - data = {'note': 'new note', 'name': 'new name'} - resp = self.put('v1/cells/1', data=data) - self.assertEqual(200, resp.status_code) - mock_cell.assert_called_once_with(mock.ANY, '1', data) - - @mock.patch.object(dbapi, 'cells_update') - def test_put_cells_by_id_invalid_property(self, mock_cell): - data = {'foo': 'isinvalid'} - resp = self.put('v1/cells/1', data=data) - self.assertEqual(400, resp.status_code) - mock_cell.assert_not_called() - - @mock.patch.object(dbapi, 'cells_update') - def test_update_cell(self, mock_cell): - record = dict(fake_resources.CELL1.items()) - payload = {'name': 'cell1-New'} - record.update(payload) - db_data = payload.copy() - mock_cell.return_value = record - - resp = self.put('v1/cells/1', data=payload) - - self.assertEqual(resp.json['name'], db_data['name']) - self.assertEqual(200, resp.status_code) - mock_cell.assert_called_once_with(mock.ANY, '1', db_data) - - -class APIV1CellsTest(APIV1Test): - @mock.patch.object(dbapi, 'cells_get_all') - def test_get_cells(self, mock_cells): - mock_cells.return_value = fake_resources.CELL_LIST - resp = self.get('v1/cells') - self.assertEqual(len(resp.json), len(fake_resources.CELL_LIST)) - mock_cells.assert_called_once_with( - mock.ANY, {'resolved-values': True}, - {'limit': 30, 'marker': None}, - ) - - @mock.patch.object(dbapi, 'cells_get_all') - def test_cells_get_all_with_details(self, mock_cells): - mock_cells.return_value = (fake_resources.CELL_LIST, {}) - resp = self.get('v1/cells?details=all') - self.assertEqual(len(resp.json), len(fake_resources.CELL_LIST)) - for cell in resp.json['cells']: - self.assertTrue('variables' in cell) - - 
@mock.patch.object(dbapi, 'cells_get_all') - def test_get_cells_invalid_property(self, mock_cells): - resp = self.get('v1/cells?foo=isaninvalidproperty') - self.assertEqual(400, resp.status_code) - mock_cells.assert_not_called() - - @mock.patch.object(dbapi, 'cells_get_all') - def test_get_cells_with_name_filters(self, mock_cells): - cell_name = 'cell1' - mock_cells.return_value = (fake_resources.CELL_LIST2, {}) - resp = self.get('v1/cells?name={}'.format(cell_name)) - cells = resp.json['cells'] - self.assertEqual(len(cells), 2) - # Ensure we got the right cell - self.assertEqual(cells[0]["name"], cell_name) - self.assertEqual(cells[1]["name"], cell_name) - - @mock.patch.object(dbapi, 'cells_get_all') - def test_get_cells_with_name_and_region_filters(self, mock_cells): - mock_cells.return_value = ([fake_resources.CELL1], {}) - resp = self.get('v1/cells?region_id=1&name=cell1') - self.assertEqual(len(resp.json['cells']), 1) - # Ensure we got the right cell - self.assertEqual(resp.json['cells'][0]["name"], - fake_resources.CELL1.name) - - @mock.patch.object(dbapi, 'cells_get_all') - def test_get_cells_with_id_filters(self, mock_cells): - mock_cells.return_value = ([fake_resources.CELL1], {}) - resp = self.get('v1/cells?region_id=1&id=1') - cells = resp.json['cells'] - self.assertEqual(len(cells), 1) - # Ensure we got the right cell - self.assertEqual(cells[0]["name"], fake_resources.CELL1.name) - - @mock.patch.object(dbapi, 'cells_get_all') - def test_get_cells_with_vars_filters(self, mock_cells): - mock_cells.return_value = ([fake_resources.CELL1], {}) - resp = self.get('v1/cells?region_id=1&vars=somekey:somevalue') - self.assertEqual(len(resp.json['cells']), 1) - self.assertEqual(resp.json['cells'][0]["name"], - fake_resources.CELL1.name) - - @mock.patch.object(dbapi, 'cells_get_all') - def test_get_cell_no_exist_by_name_fails(self, mock_cell): - err = exceptions.NotFound() - mock_cell.side_effect = err - resp = self.get('v1/cells?region_id=1&name=dontexist') - 
self.assertEqual(404, resp.status_code) - - @mock.patch.object(dbapi, 'cells_create') - def test_create_cell_with_valid_data(self, mock_cell): - mock_cell.return_value = fake_resources.CELL1 - data = {'name': 'cell1', 'region_id': 1, 'cloud_id': 1} - resp = self.post('v1/cells', data=data) - self.assertEqual(201, resp.status_code) - self.assertIn('Location', resp.headers) - self.assertEqual( - resp.headers['Location'], - "http://localhost/v1/cells/1" - ) - - @mock.patch.object(dbapi, 'cells_create') - def test_create_cell_returns_cell_obj(self, mock_cell): - mock_cell.return_value = fake_resources.CELL1 - data = { - 'name': "cell1", - 'region_id': 1, - 'cloud_id': 1, - 'variables': {'key1': 'value1', 'key2': 'value2'}, - } - resp = self.post('v1/cells', data=data) - - expected_result = { - 'id': 1, - 'name': 'cell1', - 'region_id': 1, - 'cloud_id': 1, - 'project_id': 1, - 'variables': {'key1': 'value1', 'key2': 'value2'}, - } - self.assertEqual(201, resp.status_code) - self.assertEqual(expected_result, resp.json) - self.assertIn('Location', resp.headers) - self.assertEqual( - resp.headers['Location'], - "http://localhost/v1/cells/1" - ) - - @mock.patch.object(dbapi, 'cells_create') - def test_create_cell_fails_with_invalid_data(self, mock_cell): - mock_cell.return_value = None - # data is missing required cell name - data = {'region_id': 1} - resp = self.post('v1/cells', data=data) - self.assertEqual(400, resp.status_code) - - @mock.patch.object(dbapi, 'cells_create') - def test_create_cell_with_invalid_property(self, mock_cell): - data = {'name': 'cell1', 'region_id': 1, 'foo': 'invalidproperty'} - resp = self.post('v1/cells', data=data) - self.assertEqual(400, resp.status_code) - mock_cell.assert_not_called() - - -class APIV1CellsVariablesTest(APIV1Test): - @mock.patch.object(dbapi, 'resource_get_by_id') - def test_cells_get_variables(self, mock_cell): - mock_cell.return_value = fake_resources.CELL1 - resp = self.get('v1/cells/1/variables') - expected = 
{"variables": {"key1": "value1", "key2": "value2"}} - self.assertEqual(resp.json, expected) - - @mock.patch.object(dbapi, 'variables_update_by_resource_id') - def test_cells_put_variables(self, mock_cell): - db_return_value = copy.deepcopy(fake_resources.CELL1) - db_return_value.variables["a"] = "b" - mock_cell.return_value = db_return_value - payload = {"a": "b"} - db_data = payload.copy() - resp = self.put('v1/cells/1/variables', data=payload) - self.assertEqual(resp.status_code, 200) - mock_cell.assert_called_once_with(mock.ANY, "cells", '1', db_data) - expected = { - "variables": {"key1": "value1", "key2": "value2", "a": "b"}, - } - self.assertDictEqual(expected, resp.json) - - @mock.patch.object(dbapi, 'variables_update_by_resource_id') - def test_cells_put_bad_data_type(self, mock_cell): - payload = ["a", "b"] - resp = self.put('v1/cells/1/variables', data=payload) - self.assertEqual(resp.status_code, 400) - mock_cell.assert_not_called() - - @mock.patch.object(dbapi, 'variables_delete_by_resource_id') - def test_cells_delete_variables(self, mock_cell): - payload = ["key1"] - db_data = payload.copy() - resp = self.delete('v1/cells/1/variables', data=payload) - self.assertEqual(resp.status_code, 204) - mock_cell.assert_called_once_with(mock.ANY, "cells", '1', db_data) - - @mock.patch.object(dbapi, 'variables_delete_by_resource_id') - def test_cells_delete_bad_data_type(self, mock_cell): - payload = {"a": "b"} - resp = self.delete('v1/cells/1/variables', data=payload) - self.assertEqual(resp.status_code, 400) - mock_cell.assert_not_called() - - -class APIV1CloudsIDTest(APIV1Test): - @mock.patch.object(dbapi, 'clouds_get_by_id') - def test_clouds_get_by_id(self, mock_clouds): - mock_clouds.return_value = fake_resources.CLOUD1 - resp = self.get('v1/clouds/1') - self.assertEqual(resp.json['name'], fake_resources.CLOUD1.name) - - @mock.patch.object(dbapi, 'clouds_get_by_id') - def test_clouds_get_by_bad_id_is_404(self, mock_clouds): - mock_clouds.side_effect = 
exceptions.NotFound() - resp = self.get('v1/clouds/1') - self.assertEqual(404, resp.status_code) - - @mock.patch.object(dbapi, 'clouds_delete') - def test_delete_cloud(self, mock_cloud): - resp = self.delete('v1/clouds/1') - self.assertEqual(204, resp.status_code) - - @mock.patch.object(dbapi, 'clouds_update') - def test_put_clouds_by_id(self, mock_cloud): - data = {'note': 'new note', 'name': 'new name'} - resp = self.put('v1/clouds/1', data=data) - self.assertEqual(200, resp.status_code) - mock_cloud.assert_called_once_with(mock.ANY, '1', data) - - @mock.patch.object(dbapi, 'clouds_update') - def test_put_clouds_by_id_invalid_property(self, mock_cloud): - data = {'foo': 'isinvalid'} - resp = self.put('v1/clouds/1', data=data) - self.assertEqual(400, resp.status_code) - mock_cloud.assert_not_called() - - @mock.patch.object(dbapi, 'clouds_update') - def test_update_cloud(self, mock_cloud): - record = dict(fake_resources.CLOUD1.items()) - payload = {"name": "cloud_New1"} - db_data = payload.copy() - record.update(payload) - mock_cloud.return_value = record - - resp = self.put('v1/clouds/1', data=payload) - - self.assertEqual(resp.status_code, 200) - self.assertEqual(resp.json['name'], 'cloud_New1') - mock_cloud.assert_called_once_with(mock.ANY, '1', db_data) - - -class APIV1CloudsTest(APIV1Test): - @mock.patch.object(dbapi, 'clouds_get_all') - def test_clouds_get_all(self, mock_clouds): - mock_clouds.return_value = (fake_resources.CLOUDS_LIST, {}) - resp = self.get('v1/clouds') - self.assertEqual(len(resp.json), len(fake_resources.CLOUDS_LIST)) - - @mock.patch.object(dbapi, 'clouds_get_all') - def test_clouds_get_all_by_invalid_property_name(self, mock_clouds): - resp = self.get('v1/clouds?foo=invalidpropertyname') - self.assertEqual(400, resp.status_code) - mock_clouds.assert_not_called() - - @mock.patch.object(dbapi, 'clouds_get_by_name') - def test_clouds_get_by_name_filters(self, mock_clouds): - mock_clouds.return_value = fake_resources.CLOUD1 - resp = 
self.get('v1/clouds?name=cloud1') - clouds = resp.json['clouds'] - self.assertEqual(clouds[0]["name"], fake_resources.CLOUD1.name) - - @mock.patch.object(dbapi, 'clouds_get_by_id') - def test_clouds_get_by_id_filters(self, mock_clouds): - mock_clouds.return_value = fake_resources.CLOUD1 - resp = self.get('v1/clouds?id=1') - clouds = resp.json['clouds'] - self.assertEqual(clouds[0]["name"], fake_resources.CLOUD1.name) - - @mock.patch.object(dbapi, 'clouds_get_all') - def test_clouds_get_by_vars_filters(self, mock_clouds): - mock_clouds.return_value = ([fake_resources.CLOUD1], {}) - resp = self.get('v1/clouds?vars=somekey:somevalue') - self.assertEqual(len(resp.json['clouds']), 1) - self.assertEqual(resp.json['clouds'][0]["name"], - fake_resources.CLOUD1.name) - - @mock.patch.object(dbapi, 'clouds_get_by_name') - def test_get_cloud_no_exist_by_name_fails(self, mock_clouds): - mock_clouds.side_effect = exceptions.NotFound() - resp = self.get('v1/clouds?name=bla') - self.assertEqual(404, resp.status_code) - - @mock.patch.object(dbapi, 'clouds_create') - def test_post_cloud_with_valid_data(self, mock_cloud): - mock_cloud.return_value = fake_resources.CLOUD1 - data = {'name': 'cloud1'} - resp = self.post('v1/clouds', data=data) - self.assertEqual(201, resp.status_code) - self.assertIn('Location', resp.headers) - self.assertEqual( - resp.headers['Location'], - "http://localhost/v1/clouds/1" - ) - - @mock.patch.object(dbapi, 'clouds_create') - def test_post_cloud_with_invalid_property_name(self, mock_cloud): - data = {'name': 'cloud1', 'foo': 'invalidpropertyname'} - resp = self.post('v1/clouds', data=data) - self.assertEqual(400, resp.status_code) - mock_cloud.assert_not_called() - - @mock.patch.object(dbapi, 'clouds_create') - def test_create_cloud_returns_cloud_obj(self, mock_cloud): - return_value = {'name': 'cloud1', - 'project_id': 'abcd', - 'id': 1, - 'variables': {"key1": "value1", "key2": "value2"}} - fake_cloud = fake_resources.CLOUD1 - fake_cloud.variables = 
{"key1": "value1", "key2": "value2"} - mock_cloud.return_value = fake_cloud - data = {'name': 'cloud1', - 'variables': {"key1": "value1", "key2": "value2"}} - resp = self.post('v1/clouds', data=data) - self.assertEqual(201, resp.status_code) - self.assertEqual(return_value, resp.json) - self.assertIn('Location', resp.headers) - self.assertEqual( - resp.headers['Location'], - "http://localhost/v1/clouds/1" - ) - - @mock.patch.object(dbapi, 'clouds_create') - def test_post_cloud_with_invalid_data_fails(self, mock_cloud): - mock_cloud.return_value = None - data = {} - resp = self.post('v1/clouds', data=data) - self.assertEqual(400, resp.status_code) - - -class APIV1CloudsVariablesTest(APIV1Test): - @mock.patch.object(dbapi, 'resource_get_by_id') - def test_cloud_get_variables(self, mock_cloud): - mock_cloud.return_value = fake_resources.CLOUD1 - resp = self.get('v1/clouds/1/variables') - expected = {"variables": {"key1": "value1", "key2": "value2"}} - self.assertEqual(resp.json, expected) - - @mock.patch.object(dbapi, 'variables_update_by_resource_id') - def test_clouds_put_variables(self, mock_cloud): - db_return_value = copy.deepcopy(fake_resources.CLOUD1) - db_return_value.variables["a"] = "b" - mock_cloud.return_value = db_return_value - payload = {"a": "b"} - db_data = payload.copy() - resp = self.put('v1/clouds/1/variables', data=payload) - self.assertEqual(resp.status_code, 200) - mock_cloud.assert_called_once_with(mock.ANY, "clouds", '1', db_data) - expected = { - "variables": {"key1": "value1", "key2": "value2", "a": "b"}, - } - self.assertDictEqual(expected, resp.json) - - @mock.patch.object(dbapi, 'variables_update_by_resource_id') - def test_clouds_put_variables_bad_data_type(self, mock_cloud): - payload = ["a", "b"] - resp = self.put('v1/clouds/1/variables', data=payload) - self.assertEqual(resp.status_code, 400) - mock_cloud.assert_not_called() - - @mock.patch.object(dbapi, 'variables_delete_by_resource_id') - def test_clouds_delete_variables(self, 
mock_cloud): - payload = ["key1"] - db_data = payload.copy() - resp = self.delete('v1/clouds/1/variables', data=payload) - self.assertEqual(resp.status_code, 204) - mock_cloud.assert_called_once_with(mock.ANY, "clouds", '1', db_data) - - @mock.patch.object(dbapi, 'variables_delete_by_resource_id') - def test_clouds_delete_variables_bad_data_type(self, mock_cloud): - payload = {"a": "b"} - resp = self.delete('v1/clouds/1/variables', data=payload) - self.assertEqual(resp.status_code, 400) - mock_cloud.assert_not_called() - - -class APIV1RegionsIDTest(APIV1Test): - @mock.patch.object(dbapi, 'regions_get_by_id') - def test_regions_get_by_id(self, mock_regions): - mock_regions.return_value = fake_resources.REGION1 - resp = self.get('v1/regions/1') - self.assertEqual(resp.json['name'], fake_resources.REGION1.name) - - @mock.patch.object(dbapi, 'regions_get_by_id') - def test_regions_get_by_bad_id_is_404(self, mock_regions): - mock_regions.side_effect = exceptions.NotFound() - resp = self.get('v1/regions/1') - self.assertEqual(404, resp.status_code) - - @mock.patch.object(dbapi, 'regions_delete') - def test_delete_region(self, mock_region): - resp = self.delete('v1/regions/1') - self.assertEqual(204, resp.status_code) - - @mock.patch.object(dbapi, 'regions_update') - def test_put_regions_by_id(self, mock_region): - data = {'note': 'new note', 'name': 'new name'} - resp = self.put('v1/regions/1', data=data) - self.assertEqual(200, resp.status_code) - mock_region.assert_called_once_with(mock.ANY, '1', data) - - @mock.patch.object(dbapi, 'regions_update') - def test_put_regions_by_id_invalid_property(self, mock_region): - data = {'foo': 'isinvalid'} - resp = self.put('v1/regions/1', data=data) - self.assertEqual(400, resp.status_code) - mock_region.assert_not_called() - - @mock.patch.object(dbapi, 'regions_update') - def test_update_region(self, mock_region): - record = dict(fake_resources.REGION1.items()) - payload = {"name": "region_New1"} - db_data = payload.copy() - 
record.update(payload) - mock_region.return_value = record - - resp = self.put('v1/regions/1', data=payload) - - self.assertEqual(resp.status_code, 200) - self.assertEqual(resp.json['name'], 'region_New1') - mock_region.assert_called_once_with(mock.ANY, '1', db_data) - - -class APIV1RegionsTest(APIV1Test): - @mock.patch.object(dbapi, 'regions_get_all') - def test_regions_get_all(self, mock_regions): - mock_regions.return_value = (fake_resources.REGIONS_LIST, {}) - resp = self.get('v1/regions') - self.assertEqual(len(resp.json), len(fake_resources.REGIONS_LIST)) - - @mock.patch.object(dbapi, 'regions_get_all') - def test_regions_get_all_with_details(self, mock_regions): - mock_regions.return_value = (fake_resources.REGIONS_LIST, {}) - resp = self.get('v1/regions?details=all') - self.assertEqual(len(resp.json), len(fake_resources.REGIONS_LIST)) - for region in resp.json['regions']: - self.assertTrue('variables' in region) - - @mock.patch.object(dbapi, 'regions_get_all') - def test_regions_get_all_by_invalid_property_name(self, mock_regions): - resp = self.get('v1/regions?foo=invalidpropertyname') - self.assertEqual(400, resp.status_code) - mock_regions.assert_not_called() - - @mock.patch.object(dbapi, 'regions_get_by_name') - def test_regions_get_by_name_filters(self, mock_regions): - mock_regions.return_value = fake_resources.REGION1 - resp = self.get('v1/regions?name=region1') - regions = resp.json['regions'] - self.assertEqual(regions[0]["name"], fake_resources.REGION1.name) - - @mock.patch.object(dbapi, 'regions_get_by_id') - def test_regions_get_by_id_filters(self, mock_regions): - mock_regions.return_value = fake_resources.REGION1 - resp = self.get('v1/regions?id=1') - regions = resp.json['regions'] - self.assertEqual(regions[0]["name"], fake_resources.REGION1.name) - - @mock.patch.object(dbapi, 'regions_get_all') - def test_regions_get_by_vars_filters(self, mock_regions): - mock_regions.return_value = ([fake_resources.REGION1], {}) - resp = 
self.get('v1/regions?vars=somekey:somevalue') - self.assertEqual(len(resp.json['regions']), 1) - self.assertEqual(resp.json['regions'][0]["name"], - fake_resources.REGION1.name) - - @mock.patch.object(dbapi, 'regions_get_by_name') - def test_get_region_no_exist_by_name_fails(self, mock_regions): - mock_regions.side_effect = exceptions.NotFound() - resp = self.get('v1/regions?name=bla') - self.assertEqual(404, resp.status_code) - - @mock.patch.object(dbapi, 'regions_create') - def test_post_region_with_valid_data(self, mock_region): - mock_region.return_value = fake_resources.REGION1 - data = {'name': 'region1', 'cloud_id': 1} - resp = self.post('v1/regions', data=data) - self.assertEqual(201, resp.status_code) - self.assertIn('Location', resp.headers) - self.assertEqual( - resp.headers['Location'], - "http://localhost/v1/regions/1" - ) - - @mock.patch.object(dbapi, 'regions_create') - def test_post_region_with_invalid_property_name(self, mock_region): - data = {'name': 'region1', 'foo': 'invalidpropertyname'} - resp = self.post('v1/regions', data=data) - self.assertEqual(400, resp.status_code) - mock_region.assert_not_called() - - @mock.patch.object(dbapi, 'regions_create') - def test_create_region_returns_region_obj(self, mock_region): - return_value = {'name': 'region1', - 'project_id': 'abcd', - 'id': 1, - 'cloud_id': 1, - 'variables': {"key1": "value1", "key2": "value2"}} - fake_region = fake_resources.REGION1 - fake_region.variables = {"key1": "value1", "key2": "value2"} - mock_region.return_value = fake_region - data = {'name': 'region1', 'cloud_id': 1, - 'variables': {"key1": "value1", "key2": "value2"}} - resp = self.post('v1/regions', data=data) - self.assertEqual(201, resp.status_code) - self.assertEqual(return_value, resp.json) - self.assertIn('Location', resp.headers) - self.assertEqual( - resp.headers['Location'], - "http://localhost/v1/regions/1" - ) - - @mock.patch.object(dbapi, 'regions_create') - def test_post_region_with_invalid_data_fails(self, 
mock_region): - mock_region.return_value = None - data = {} - resp = self.post('v1/regions', data=data) - self.assertEqual(400, resp.status_code) - - -class APIV1RegionsVariablesTest(APIV1Test): - @mock.patch.object(dbapi, 'resource_get_by_id') - def test_region_get_variables(self, mock_region): - mock_region.return_value = fake_resources.REGION1 - resp = self.get('v1/regions/1/variables') - expected = {"variables": {"key1": "value1", "key2": "value2"}} - self.assertEqual(resp.json, expected) - - @mock.patch.object(dbapi, 'variables_update_by_resource_id') - def test_regions_put_variables(self, mock_region): - db_return_value = copy.deepcopy(fake_resources.REGION1) - db_return_value.variables["a"] = "b" - mock_region.return_value = db_return_value - payload = {"a": "b"} - db_data = payload.copy() - resp = self.put('v1/regions/1/variables', data=payload) - self.assertEqual(resp.status_code, 200) - mock_region.assert_called_once_with(mock.ANY, "regions", '1', db_data) - expected = { - "variables": {"key1": "value1", "key2": "value2", "a": "b"}, - } - self.assertDictEqual(expected, resp.json) - - @mock.patch.object(dbapi, 'variables_update_by_resource_id') - def test_regions_put_variables_bad_data_type(self, mock_region): - payload = ["a", "b"] - resp = self.put('v1/regions/1/variables', data=payload) - self.assertEqual(resp.status_code, 400) - mock_region.assert_not_called() - - @mock.patch.object(dbapi, 'variables_delete_by_resource_id') - def test_regions_delete_variables(self, mock_region): - payload = ["key1"] - db_data = payload.copy() - resp = self.delete('v1/regions/1/variables', data=payload) - self.assertEqual(resp.status_code, 204) - mock_region.assert_called_once_with(mock.ANY, "regions", '1', db_data) - - @mock.patch.object(dbapi, 'variables_delete_by_resource_id') - def test_regions_delete_variables_bad_data_type(self, mock_region): - payload = {"a": "b"} - resp = self.delete('v1/regions/1/variables', data=payload) - self.assertEqual(resp.status_code, 
400) - mock_region.assert_not_called() - - -class APIV1HostsIDTest(APIV1Test): - @mock.patch.object(dbapi, 'hosts_get_by_id') - def test_get_hosts_by_id(self, mock_hosts): - mock_hosts.return_value = fake_resources.HOST1 - resp = self.get('v1/hosts/1') - self.assertEqual(resp.json["name"], fake_resources.HOST1.name) - - @mock.patch.object(dbapi, 'hosts_get_by_id') - def test_get_hosts_by_id_invalid_property_name(self, mock_hosts): - resp = self.get('/v1/hosts/1?foo=invalidproperty') - self.assertEqual(400, resp.status_code) - mock_hosts.assert_not_called() - - @mock.patch.object(dbapi, 'hosts_update') - def test_put_hosts_by_id_invalid_property_name(self, mock_hosts): - resp = self.put('/v1/hosts/1', data={'foo': 'invalidproperty'}) - self.assertEqual(400, resp.status_code) - mock_hosts.assert_not_called() - - @mock.patch.object(dbapi, 'hosts_get_by_id') - def test_get_hosts_by_bad_id_is_404(self, mock_hosts): - mock_hosts.side_effect = exceptions.NotFound() - resp = self.get('v1/hosts/1') - self.assertEqual(404, resp.status_code) - - @mock.patch.object(dbapi, 'hosts_get_by_id') - def test_get_hosts_resolved_vars(self, mock_host): - region_vars = {"r_var": "one"} - host = fake_resources.HOST1 - host.resolved.update(region_vars) - expected = {"r_var": "one", "key1": "value1", "key2": "value2"} - mock_host.return_value = host - resp = self.get('v1/hosts/1') - self.assertEqual(resp.json["variables"], expected) - - @mock.patch.object(dbapi, 'hosts_get_by_id') - def test_get_hosts_no_resolved_vars(self, mock_host): - region_vars = {"r_var": "one"} - host = fake_resources.HOST1 - host.resolved.update(region_vars) - expected = {"key1": "value1", "key2": "value2"} - mock_host.return_value = host - resp = self.get('v1/hosts/1?resolved-values=false') - self.assertEqual(resp.json["variables"], expected) - - @mock.patch.object(api.v1.resources.utils, 'get_device_type') - @mock.patch.object(dbapi, 'hosts_update') - def test_update_host(self, mock_host, mock_get_device_type): - 
# NOTE(review): this region was reconstructed from a collapsed/mangled source
# view (a deletion diff with many logical lines fused per physical line).
# Formatting is normalized; runtime behavior and strings are unchanged.
# An incomplete leading fragment (tail of a host PUT test whose definition
# starts before this view) could not be reproduced without guessing.


class APIV1HostsLabelsTest(APIV1Test):
    # CRUD coverage for the /v1/hosts/<id>/labels endpoint.

    @mock.patch.object(dbapi, 'hosts_get_by_id')
    def test_get_hosts_labels(self, mock_host):
        mock_host.return_value = fake_resources.HOST4
        resp = self.get('v1/hosts/1/labels')
        self.assertEqual(resp.json["labels"], ["a", "b"])

    @mock.patch.object(dbapi, 'hosts_labels_update')
    def test_put_hosts_labels(self, mock_host):
        payload = {"labels": ["a", "b"]}
        mock_host.return_value = fake_resources.HOST4
        resp = self.put('v1/hosts/1/labels', data=payload)
        self.assertEqual(200, resp.status_code)
        self.assertEqual(resp.json, payload)

    @mock.patch.object(dbapi, 'hosts_labels_update')
    def test_put_hosts_labels_invalid_property_name(self, mock_host):
        req_data = {"labels": ["a", "b"], "foo": ["should", "be", "removed"]}
        resp = self.put('v1/hosts/1/labels', data=req_data)
        self.assertEqual(400, resp.status_code)
        mock_host.assert_not_called()

    @mock.patch.object(dbapi, 'hosts_labels_update')
    def test_put_hosts_labels_validate_type(self, mock_host):
        payload = {"labels": {"a": "b"}}
        mock_host.return_value = fake_resources.HOST4
        resp = self.put('v1/hosts/1/labels', data=payload)
        self.assertEqual(400, resp.status_code)

    @mock.patch.object(dbapi, 'hosts_labels_delete')
    def test_hosts_delete_labels(self, mock_host):
        payload = {"labels": ["label1", "label2"]}
        db_data = payload.copy()
        resp = self.delete('v1/hosts/1/labels', data=payload)
        self.assertEqual(resp.status_code, 204)
        mock_host.assert_called_once_with(mock.ANY, '1', db_data)

    @mock.patch.object(dbapi, 'hosts_labels_delete')
    def test_hosts_delete_labels_bad_data_type(self, mock_host):
        # A bare list (not {"labels": [...]}) must be rejected up front.
        payload = ["label1", "label2"]
        resp = self.delete('v1/hosts/1/labels', data=payload)
        self.assertEqual(resp.status_code, 400)
        mock_host.assert_not_called()


class APIV1HostsTest(APIV1Test):
    # List/create coverage for the /v1/hosts collection endpoint.

    @mock.patch.object(dbapi, 'hosts_get_all')
    def test_get_hosts_by_region_gets_all_hosts(self, fake_hosts):
        fake_hosts.return_value = (fake_resources.HOSTS_LIST_R1, {})
        resp = self.get('/v1/hosts?region_id=1')
        self.assertEqual(len(resp.json), 2)

    @mock.patch.object(dbapi, 'hosts_get_all')
    def test_hosts_get_all_with_details(self, mock_hosts):
        mock_hosts.return_value = (fake_resources.HOSTS_LIST_R1, {})
        resp = self.get('v1/hosts?details=all')
        self.assertEqual(len(resp.json), len(fake_resources.HOSTS_LIST_R1))
        for host in resp.json['hosts']:
            self.assertTrue('variables' in host)

    @mock.patch.object(dbapi, 'hosts_get_all')
    def test_get_hosts_invalid_property_name(self, fake_hosts):
        resp = self.get('/v1/hosts?foo=invalidproperty')
        self.assertEqual(400, resp.status_code)
        fake_hosts.assert_not_called()

    @mock.patch.object(dbapi, 'hosts_get_all')
    def test_get_host_by_non_existing_region_raises404(self, fake_hosts):
        fake_hosts.side_effect = exceptions.NotFound()
        resp = self.get('/v1/hosts?region_id=5')
        self.assertEqual(404, resp.status_code)

    @mock.patch.object(dbapi, 'hosts_get_all')
    def test_get_hosts(self, fake_hosts):
        fake_hosts.return_value = (fake_resources.HOSTS_LIST_R3, {})
        resp = self.get('/v1/hosts')
        self.assertEqual(len(resp.json['hosts']), 3)
        fake_hosts.assert_called_once_with(
            mock.ANY, {'resolved-values': True},
            {'limit': 30, 'marker': None},
        )

    @mock.patch.object(dbapi, 'hosts_get_all')
    def test_get_host_by_name_filters(self, fake_hosts):
        fake_hosts.return_value = (fake_resources.HOSTS_LIST_R2, {})
        resp = self.get('/v1/hosts?region_id=1&name=www.example.net')
        host_resp = fake_resources.HOSTS_LIST_R2
        self.assertEqual(len(resp.json['hosts']), len(host_resp))
        self.assertEqual(resp.json['hosts'][0]["name"], host_resp[0].name)

    @mock.patch.object(dbapi, 'hosts_get_all')
    def test_get_host_by_ip_address_filter(self, fake_hosts):
        region_id = 1
        ip_address = '10.10.0.1'
        filters = {
            'region_id': 1, 'ip_address': ip_address,
            'resolved-values': True,
        }
        path_query = '/v1/hosts?region_id={}&ip_address={}'.format(
            region_id, ip_address
        )
        fake_hosts.return_value = (fake_resources.HOSTS_LIST_R2, {})
        resp = self.get(path_query)
        host_resp = fake_resources.HOSTS_LIST_R2
        self.assertEqual(len(resp.json['hosts']), 1)
        self.assertEqual(resp.json['hosts'][0]["name"], host_resp[0].name)

        fake_hosts.assert_called_once_with(
            mock.ANY, filters, {'limit': 30, 'marker': None},
        )

    @mock.patch.object(dbapi, 'hosts_get_all')
    def test_get_host_by_vars_filters(self, fake_hosts):
        fake_hosts.return_value = ([fake_resources.HOST1], {})
        resp = self.get('/v1/hosts?region_id=1&vars=somekey:somevalue')
        self.assertEqual(len(resp.json['hosts']), 1)
        self.assertEqual(resp.json['hosts'][0]["name"],
                         fake_resources.HOST1.name)

    @mock.patch.object(dbapi, 'hosts_get_all')
    def test_get_host_by_label_filters(self, fake_hosts):
        fake_hosts.return_value = (fake_resources.HOSTS_LIST_R2, {})
        resp = self.get('/v1/hosts?region_id=1&label=somelabel')
        host_resp = fake_resources.HOSTS_LIST_R2
        self.assertEqual(len(resp.json['hosts']), len(host_resp))
        self.assertEqual(resp.json['hosts'][0]["name"], host_resp[0].name)

    @mock.patch.object(dbapi, 'hosts_create')
    def test_create_host_with_valid_data(self, mock_host):
        mock_host.return_value = fake_resources.HOST1
        data = {'name': 'www.craton.com', 'region_id': 1, 'cloud_id': 1,
                'ip_address': '192.168.1.1', 'device_type': 'server',
                'active': True}
        resp = self.post('/v1/hosts', data=data)
        self.assertEqual(201, resp.status_code)
        self.assertIn('Location', resp.headers)
        self.assertEqual(
            resp.headers['Location'],
            "http://localhost/v1/hosts/1"
        )

    @mock.patch.object(dbapi, 'hosts_create')
    def test_create_host_returns_host_obj(self, mock_host):
        mock_host.return_value = fake_resources.HOST1
        data = {
            'name': 'www.craton.com',
            'region_id': 1,
            'cloud_id': 1,
            'ip_address': '192.168.1.1',
            'device_type': 'server',
            'labels': [],
            'variables': {"key1": "value1", "key2": "value2"},
        }
        resp = self.post('v1/hosts', data=data)
        self.assertEqual(201, resp.status_code)
        expected_response = {
            'id': 1,
            'name': 'www.craton.com',
            'region_id': 1,
            'cloud_id': 1,
            'project_id': 1,
            'ip_address': '192.168.1.1',
            'device_type': 'server',
            'labels': [],
            'variables': {"key1": "value1", "key2": "value2"},
            'cell_id': None,
            'parent_id': None,
            'links': [{'href': 'http://localhost/v1/regions/1', 'rel': 'up'}],
        }
        self.assertEqual(expected_response, resp.json)
        self.assertIn('Location', resp.headers)
        self.assertEqual(
            resp.headers['Location'],
            "http://localhost/v1/hosts/1"
        )

    @mock.patch.object(dbapi, 'hosts_create')
    def test_create_host_invalid_property_name(self, mock_host):
        data = {'name': 'www.host1.com', 'region_id': 1, 'foo': 'invalidprop',
                'ip_address': '10.0.0.1', 'device_type': 'server'}

        resp = self.post('v1/hosts', data=data)

        self.assertEqual(400, resp.status_code)
        mock_host.assert_not_called()


class APIV1HostsVariablesTest(APIV1Test):
    # CRUD coverage for the /v1/hosts/<id>/variables endpoint.

    @mock.patch.object(dbapi, 'resource_get_by_id')
    def test_host_get_variables(self, mock_host):
        mock_host.return_value = fake_resources.HOST1
        resp = self.get('v1/hosts/1/variables?resolved-values=false')
        expected = {"variables": {"key1": "value1", "key2": "value2"}}
        self.assertEqual(resp.json, expected)

    @mock.patch.object(dbapi, 'resource_get_by_id')
    def test_host_get_variables_invalid_property_name(self, mock_host):
        resp = self.get('v1/hosts/1/variables?foo=isnotreal')
        self.assertEqual(400, resp.status_code)
        mock_host.assert_not_called()

    @mock.patch.object(dbapi, 'resource_get_by_id')
    def test_host_get_resolved_variables(self, mock_host):
        region_vars = {"r_var": "somevar"}
        host = fake_resources.HOST1
        host.resolved.update(region_vars)
        expected = {"r_var": "somevar", "key1": "value1", "key2": "value2"}
        mock_host.return_value = host
        resp = self.get('v1/hosts/1/variables')
        self.assertEqual(resp.json["variables"], expected)

    @mock.patch.object(dbapi, 'variables_update_by_resource_id')
    def test_hosts_put_data(self, mock_host):
        db_return_value = copy.deepcopy(fake_resources.HOST1)
        db_return_value.variables["a"] = "b"
        mock_host.return_value = db_return_value
        payload = {"a": "b"}
        db_data = payload.copy()
        resp = self.put('v1/hosts/1/variables', data=payload)
        self.assertEqual(resp.status_code, 200)
        mock_host.assert_called_once_with(mock.ANY, "hosts", '1', db_data)
        expected = {
            "variables": {"key1": "value1", "key2": "value2", "a": "b"},
        }
        self.assertDictEqual(expected, resp.json)

    @mock.patch.object(dbapi, 'variables_update_by_resource_id')
    def test_hosts_put_variables_bad_data_type(self, mock_host):
        payload = ["a", "b"]
        resp = self.put('v1/hosts/1/variables', data=payload)
        self.assertEqual(resp.status_code, 400)
        mock_host.assert_not_called()

    @mock.patch.object(dbapi, 'variables_delete_by_resource_id')
    def test_hosts_delete_variables(self, mock_host):
        payload = ["key1"]
        db_data = payload.copy()
        resp = self.delete('v1/hosts/1/variables', data=payload)
        self.assertEqual(resp.status_code, 204)
        mock_host.assert_called_once_with(mock.ANY, "hosts", '1', db_data)

    @mock.patch.object(dbapi, 'variables_delete_by_resource_id')
    def test_hosts_delete_variables_bad_data_type(self, mock_host):
        payload = {"a": "b"}
        resp = self.delete('v1/hosts/1/variables', data=payload)
        self.assertEqual(resp.status_code, 400)
        mock_host.assert_not_called()


class APIV1ProjectsTest(APIV1Test):
    # CRUD coverage for the /v1/projects collection endpoint.

    @mock.patch.object(dbapi, 'projects_create')
    def test_create_project(self, mock_project):
        mock_project.return_value = fake_resources.PROJECT1
        data = {'name': 'project1'}
        resp = self.post('v1/projects', data=data)
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(
            resp.json['id'],
            "4534dcb4-dacd-474f-8afc-8bd5ab2d26e8"
        )
        self.assertIn('Location', resp.headers)
        self.assertEqual(
            resp.headers['Location'],
            "http://localhost/v1/projects/4534dcb4-dacd-474f-8afc-8bd5ab2d26e8"
        )

    @mock.patch.object(dbapi, 'projects_get_all')
    def test_project_get_all(self, mock_projects):
        proj1 = fake_resources.PROJECT1
        proj2 = fake_resources.PROJECT2
        return_value = ([proj1, proj2], {})
        mock_projects.return_value = return_value

        resp = self.get('v1/projects')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(resp.json['projects']), 2)

    @mock.patch.object(dbapi, 'projects_create')
    def test_project_post_invalid_property(self, mock_projects):
        data = {'foo': 'isinvalidproperty'}
        resp = self.post('v1/projects', data=data)
        self.assertEqual(400, resp.status_code)
        mock_projects.assert_not_called()

    @mock.patch.object(dbapi, 'projects_get_all')
    def test_projects_get_no_admin_fails(self, mock_project):
        mock_project.side_effect = exceptions.AdminRequired()
        resp = self.get('v1/projects')
        self.assertEqual(resp.status_code, 401)

    @mock.patch.object(dbapi, 'projects_get_by_id')
    def test_project_get_by_id(self, mock_project):
        proj1 = fake_resources.PROJECT1
        proj1_id = str(proj1.id)
        mock_project.return_value = proj1

        resp = self.get('v1/projects/{}'.format(proj1_id))
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.json['id'], proj1_id)

    @mock.patch.object(dbapi, 'projects_get_by_name')
    def test_project_get_by_name(self, mock_projects):
        proj1 = fake_resources.PROJECT1
        proj1_id = str(proj1.id)
        return_value = ([proj1], {})
        mock_projects.return_value = return_value

        resp = self.get('v1/projects?name=project1')
        self.assertEqual(resp.status_code, 200)

        projects = resp.json['projects']
        self.assertEqual(len(projects), 1)
        self.assertEqual(projects[0]['id'], proj1_id)


class APIV1ProjectsVariablesTest(APIV1Test):
    # CRUD coverage for the /v1/projects/<id>/variables endpoint.

    @mock.patch.object(dbapi, 'resource_get_by_id')
    def test_projects_get_variables(self, mock_project):
        mock_project.return_value = fake_resources.PROJECT1
        resp = self.get(
            'v1/projects/{}/variables'.format(fake_resources.PROJECT1.id)
        )
        expected = {"variables": {"key1": "value1", "key2": "value2"}}
        self.assertEqual(resp.json, expected)

    @mock.patch.object(dbapi, 'variables_update_by_resource_id')
    def test_projects_put_variables(self, mock_project):
        proj1 = fake_resources.PROJECT1
        proj1_id = str(proj1.id)
        db_return_value = copy.deepcopy(proj1)
        db_return_value.variables["a"] = "b"
        mock_project.return_value = db_return_value
        payload = {"a": "b"}
        db_data = payload.copy()
        resp = self.put(
            'v1/projects/{}/variables'.format(proj1_id),
            data=payload
        )
        self.assertEqual(resp.status_code, 200)
        mock_project.assert_called_once_with(mock.ANY, "projects", proj1_id,
                                             db_data)
        expected = {
            "variables": {"key1": "value1", "key2": "value2", "a": "b"},
        }
        self.assertDictEqual(expected, resp.json)

    @mock.patch.object(dbapi, 'variables_update_by_resource_id')
    def test_projects_put_variables_bad_data_type(self, mock_project):
        payload = ["a", "b"]
        resp = self.put(
            'v1/projects/{}/variables'.format(fake_resources.PROJECT1.id),
            data=payload
        )
        self.assertEqual(400, resp.status_code)
        mock_project.assert_not_called()

    @mock.patch.object(dbapi, 'variables_delete_by_resource_id')
    def test_projects_delete_variables(self, mock_project):
        proj1 = fake_resources.PROJECT1
        proj1_id = str(proj1.id)
        payload = ["key1"]
        db_data = payload.copy()
        resp = self.delete(
            'v1/projects/{}/variables'.format(proj1_id), data=payload
        )
        self.assertEqual(resp.status_code, 204)
        mock_project.assert_called_once_with(mock.ANY, "projects", proj1_id,
                                             db_data)

    @mock.patch.object(dbapi, 'variables_delete_by_resource_id')
    def test_projects_delete_variables_bad_data_type(self, mock_project):
        payload = {'a': 'b'}
        resp = self.delete(
            'v1/projects/{}/variables'.format(fake_resources.PROJECT1.id),
            data=payload
        )
        self.assertEqual(400, resp.status_code)
        mock_project.assert_not_called()


class APIV1UsersTest(APIV1Test):
    # Coverage for the /v1/users collection endpoint.

    @mock.patch.object(dbapi, 'users_create')
    @mock.patch.object(dbapi, 'projects_get_by_id')
    def test_create_users(self, mock_project, mock_user):
        # project_id1 is a module-level fixture defined earlier in this file.
        mock_project.return_value = {'id': project_id1, 'name': 'project1'}
        mock_user.return_value = fake_resources.USER1
        data = {'username': 'user1', 'is_admin': False,
                'project_id': project_id1}
        resp = self.post('v1/users', data=data)
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(resp.json['id'], 1)
        self.assertIn("Location", resp.headers)
        self.assertEqual(
            resp.headers['Location'],
            "http://localhost/v1/users/1"
        )

    @mock.patch.object(dbapi, 'users_create')
    @mock.patch.object(dbapi, 'projects_get_by_id')
    def test_create_users_invalid_property(self, mock_project, mock_user):
        data = {
            'username': 'user1',
            'is_admin': False,
            'foo': 'isinvalidproperty'
        }
        resp = self.post('v1/users', data=data)
        self.assertEqual(400, resp.status_code)
        mock_project.assert_not_called()
        mock_user.assert_not_called()

    @mock.patch.object(dbapi, 'users_get_all')
    def test_users_get_all(self, mock_user):
        return_values = ([fake_resources.USER1, fake_resources.USER2], {})
        mock_user.return_value = return_values
        resp = self.get('v1/users')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(resp.json), 2)
        self.assertEqual(len(resp.json['users']), 2)

    @mock.patch.object(dbapi, 'users_get_all')
    def test_users_get_no_admin_fails(self, mock_user):
        mock_user.side_effect = exceptions.AdminRequired()
        resp = self.get('v1/users')
        self.assertEqual(resp.status_code, 401)


class APIV1NetworksTest(APIV1Test):
    # List/create coverage for the /v1/networks collection endpoint.

    @mock.patch.object(dbapi, 'networks_get_all')
    def test_networks_by_region_gets_all_networks(self, fake_network):
        # NOTE(review): unlike sibling tests this stubs a bare list (no
        # pagination dict) — preserved as-is from the original.
        fake_network.return_value = fake_resources.NETWORKS_LIST
        resp = self.get('/v1/networks?region_id=1')
        self.assertEqual(len(resp.json), 2)

    @mock.patch.object(dbapi, 'networks_get_all')
    def test_networks_get_all_with_details(self, mock_networks):
        mock_networks.return_value = (fake_resources.NETWORKS_LIST, {})
        resp = self.get('v1/networks?details=all')
        self.assertEqual(len(resp.json), len(fake_resources.NETWORKS_LIST))
        for network in resp.json['networks']:
            self.assertTrue('variables' in network)

    @mock.patch.object(dbapi, 'networks_get_all')
    def test_get_networks_by_non_existing_region_raises404(self, fake_network):
        fake_network.side_effect = exceptions.NotFound()
        resp = self.get('/v1/networks?region_id=5')
        self.assertEqual(404, resp.status_code)

    @mock.patch.object(dbapi, 'networks_get_all')
    def test_get_networks_by_filters(self, fake_networks):
        fake_networks.return_value = ([fake_resources.NETWORK1], {})
        resp = self.get('/v1/networks?region_id=1&name=PrivateNetwork')
        net_resp = fake_resources.NETWORK1
        self.assertEqual(len(resp.json['networks']), 1)
        self.assertEqual(resp.json['networks'][0]["name"], net_resp.name)

    @mock.patch.object(dbapi, 'networks_get_all')
    def test_get_networks(self, fake_networks):
        fake_networks.return_value = (fake_resources.NETWORKS_LIST2, {})
        resp = self.get('/v1/networks')
        self.assertEqual(len(resp.json['networks']), 3)
        fake_networks.assert_called_once_with(
            mock.ANY, {'resolved-values': True, 'details': False},
            {'limit': 30, 'marker': None},
        )

    @mock.patch.object(dbapi, 'networks_get_all')
    def test_get_networks_invalid_property(self, fake_networks):
        resp = self.get('/v1/networks?foo=invalid')
        self.assertEqual(400, resp.status_code)
        fake_networks.assert_not_called()

    @mock.patch.object(dbapi, 'networks_create')
    def test_create_networks_with_valid_data(self, mock_network):
        mock_network.return_value = fake_resources.NETWORK1
        data = {
            'name': 'PrivateNetwork',
            'cidr': '192.168.1.0/24',
            'gateway': '192.168.1.1',
            'netmask': '255.255.255.0',
            'variables': {'key1': 'value1'},
            'region_id': 1,
            'cloud_id': 1,
        }
        resp = self.post('/v1/networks', data=data)
        self.assertEqual(201, resp.status_code)
        self.assertIn('Location', resp.headers)
        self.assertEqual(
            resp.headers['Location'],
            "http://localhost/v1/networks/1"
        )

    @mock.patch.object(dbapi, 'networks_create')
    def test_create_network_returns_network_obj(self, mock_network):
        mock_network.return_value = fake_resources.NETWORK1
        data = {
            'name': 'PrivateNetwork',
            'cidr': '192.168.1.0/24',
            'gateway': '192.168.1.1',
            'netmask': '255.255.255.0',
            'variables': {'key1': 'value1'},
            'region_id': 1,
            'cloud_id': 1,
        }
        expected_result = copy.deepcopy(data)
        expected_result.update({'id': 1, 'project_id': 1})

        resp = self.post('/v1/networks', data=data)
        self.assertEqual(201, resp.status_code)
        self.assertEqual(expected_result, resp.json)

    @mock.patch.object(dbapi, 'networks_create')
    def test_create_networks_with_invalid_data(self, mock_network):
        mock_network.return_value = None
        # data is missing entries
        data = {'region_id': 1}
        resp = self.post('v1/networks', data=data)
        self.assertEqual(400, resp.status_code)

    @mock.patch.object(dbapi, 'networks_create')
    def test_create_networks_with_invalid_property(self, mock_network):
        data = {'name': 'some network', 'region_id': 1,
                'cidr': '10.10.1.0/24', 'gateway': '192.168.1.1',
                'netmask': '255.255.255.0', 'foo': 'isinvalid'}
        resp = self.post('/v1/networks', data=data)
        self.assertEqual(400, resp.status_code)
        mock_network.assert_not_called()


class APIV1NetworksIDTest(APIV1Test):
    # Coverage for the /v1/networks/<id> item endpoint.

    @mock.patch.object(dbapi, 'networks_get_by_id')
    def test_networks_get_by_id(self, mock_network):
        mock_network.return_value = fake_resources.NETWORK1
        resp = self.get('v1/networks/1')
        self.assertEqual(resp.json["name"], fake_resources.NETWORK1.name)

    @mock.patch.object(dbapi, 'networks_get_by_id')
    def test_networks_get_by_bad_id_is_404(self, mock_network):
        mock_network.side_effect = exceptions.NotFound()
        resp = self.get('v1/networks/9')
        self.assertEqual(404, resp.status_code)

    @mock.patch.object(dbapi, 'networks_update')
    def test_update_network(self, mock_network):
        record = dict(fake_resources.NETWORK1.items())
        payload = {"name": "Network_New1"}
        db_data = payload.copy()
        record.update(payload)
        mock_network.return_value = record

        resp = self.put('v1/networks/1', data=payload)

        self.assertEqual(resp.json['name'], payload['name'])
        self.assertEqual(resp.status_code, 200)
        mock_network.assert_called_once_with(mock.ANY, '1', db_data)

    @mock.patch.object(dbapi, 'networks_update')
    def test_update_network_invalid_property(self, mock_network):
        payload = {"foo": "isinvalid"}
        resp = self.put('v1/networks/1', data=payload)
        self.assertEqual(400, resp.status_code)
        mock_network.assert_not_called()

    @mock.patch.object(dbapi, 'networks_delete')
    def test_delete_network(self, mock_network):
        resp = self.delete('v1/networks/1')
        self.assertEqual(204, resp.status_code)


class APIV1NetworksVariablesTest(APIV1Test):
    # CRUD coverage for the /v1/networks/<id>/variables endpoint.

    @mock.patch.object(dbapi, 'resource_get_by_id')
    def test_networks_get_variables(self, mock_network):
        mock_network.return_value = fake_resources.NETWORK1
        resp = self.get('v1/networks/1/variables')
        expected = {"variables": {"key1": "value1"}}
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.json, expected)

    @mock.patch.object(dbapi, 'variables_update_by_resource_id')
    def test_networks_put_variables(self, mock_network):
        db_return_value = copy.deepcopy(fake_resources.NETWORK1)
        db_return_value.variables["a"] = "b"
        mock_network.return_value = db_return_value
        payload = {"a": "b"}
        db_data = payload.copy()
        resp = self.put('v1/networks/1/variables', data=payload)
        self.assertEqual(resp.status_code, 200)
        mock_network.assert_called_once_with(
            mock.ANY, "networks", '1', db_data
        )
        expected = {
            "variables": {"key1": "value1", "a": "b"},
        }
        self.assertDictEqual(expected, resp.json)

    @mock.patch.object(dbapi, 'variables_update_by_resource_id')
    def test_networks_put_variables_bad_data_type(self, mock_network):
        payload = ["a", "b"]
        resp = self.put('v1/networks/1/variables', data=payload)
        self.assertEqual(resp.status_code, 400)
        mock_network.assert_not_called()

    @mock.patch.object(dbapi, 'variables_delete_by_resource_id')
    def test_networks_delete_variables(self, mock_network):
        payload = ["key1"]
        db_data = payload.copy()
        resp = self.delete('v1/networks/1/variables', data=payload)
        self.assertEqual(resp.status_code, 204)
        mock_network.assert_called_once_with(
            mock.ANY, "networks", '1', db_data
        )

    @mock.patch.object(dbapi, 'variables_delete_by_resource_id')
    def test_networks_delete_variables_bad_data_type(self, mock_network):
        payload = {"a": "b"}
        resp = self.delete('v1/networks/1/variables', data=payload)
        self.assertEqual(resp.status_code, 400)
        mock_network.assert_not_called()


class APIV1NetworkDevicesIDTest(APIV1Test):
    # Coverage for the /v1/network-devices/<id> item endpoint.

    @mock.patch.object(dbapi, 'network_devices_get_by_id')
    def test_get_network_devices_by_id_invalid_property(self, fake_device):
        resp = self.get('/v1/network-devices/1?foo=isaninvalidproperty')
        self.assertEqual(400, resp.status_code)
        fake_device.assert_not_called()

    @mock.patch.object(dbapi, 'network_devices_get_by_id')
    def test_get_network_devices_by_id(self, fake_device):
        fake_device.return_value = fake_resources.NETWORK_DEVICE1
        resp = self.get('/v1/network-devices/1')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.json['name'], 'NetDevices1')
        fake_device.assert_called_once_with(mock.ANY, '1')

    @mock.patch.object(api.v1.resources.utils, 'get_device_type')
    @mock.patch.object(dbapi, 'network_devices_update')
    def test_put_network_device(self, fake_device, mock_get_device_type):
        mock_get_device_type.return_value = "network_devices"
        payload = {"name": "NetDev_New1", "parent_id": 2}
        fake_device.return_value = dict(fake_resources.NETWORK_DEVICE1.items(),
                                        **payload)
        resp = self.put('v1/network-devices/1', data=payload)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.json['name'], "NetDev_New1")
        self.assertEqual(resp.json['parent_id'], 2)
        fake_device.assert_called_once_with(
            mock.ANY, '1', {"name": "NetDev_New1", "parent_id": 2}
        )
        mock_get_device_type.assert_called_once()
        up_link = {
            "rel": "up",
            "href": "http://localhost/v1/network-devices/2"
        }
        self.assertIn(up_link, resp.json["links"])

    @mock.patch.object(dbapi, 'network_devices_update')
    def test_put_network_device_invalid_property(self, fake_device):
        payload = {"foo": "isinvalid"}
        resp = self.put('v1/network-devices/1', data=payload)
        self.assertEqual(400, resp.status_code)
        fake_device.assert_not_called()

    @mock.patch.object(dbapi, 'network_devices_get_by_id')
    def test_get_network_devices_get_by_id(self, mock_devices):
        mock_devices.return_value = fake_resources.NETWORK_DEVICE1
        resp = self.get('/v1/network-devices/1')
        self.assertEqual(resp.json["name"],
                         fake_resources.NETWORK_DEVICE1.name)

    @mock.patch.object(dbapi, 'network_devices_delete')
    def test_delete_network_devices(self, mock_devices):
        resp = self.delete('v1/network-devices/1')
        self.assertEqual(204, resp.status_code)


class APIV1NetworkDevicesTest(APIV1Test):
    # List/create coverage for the /v1/network-devices collection endpoint.

    @mock.patch.object(dbapi, 'network_devices_get_all')
    def test_get_network_devices_by_ip_address_filter(self, fake_devices):
        region_id = '1'
        ip_address = '10.10.0.1'
        filters = {'region_id': region_id, 'ip_address': ip_address,
                   'resolved-values': True}
        path_query = '/v1/network-devices?region_id={}&ip_address={}'.format(
            region_id, ip_address
        )
        fake_devices.return_value = (fake_resources.NETWORK_DEVICE_LIST1, {})
        resp = self.get(path_query)
        device_resp = fake_resources.NETWORK_DEVICE_LIST1
        self.assertEqual(len(resp.json['network_devices']), 1)
        self.assertEqual(resp.json['network_devices'][0]["ip_address"],
                         device_resp[0].ip_address)

        fake_devices.assert_called_once_with(
            mock.ANY, filters, {'limit': 30, 'marker': None},
        )

    @mock.patch.object(dbapi, 'network_devices_get_all')
    def test_get_network_devices_invalid_property(self, fake_devices):
        resp = self.get('/v1/network-devices?foo=isaninvalidproperty')
        self.assertEqual(400, resp.status_code)
        fake_devices.assert_not_called()

    @mock.patch.object(dbapi, 'network_devices_get_all')
    def test_get_network_devices(self, fake_devices):
        # NOTE(review): stubs a bare list (no pagination dict) — preserved
        # as-is from the original.
        fake_devices.return_value = fake_resources.NETWORK_DEVICE_LIST2
        resp = self.get('/v1/network-devices')
        self.assertEqual(len(resp.json), 2)
        fake_devices.assert_called_once_with(
            mock.ANY, {'resolved-values': True},
            {'limit': 30, 'marker': None},
        )

    @mock.patch.object(dbapi, 'network_devices_get_all')
    def test_network_devices_get_by_region(self, mock_devices):
        mock_devices.return_value = (fake_resources.NETWORK_DEVICE_LIST1, {})
        resp = self.get('/v1/network-devices?region_id=1')
        network_devices = resp.json['network_devices']
        self.assertEqual(len(network_devices), 1)
        self.assertEqual(200, resp.status_code)
        self.assertEqual(
            network_devices[0]["name"],
            fake_resources.NETWORK_DEVICE_LIST1[0].name
        )

    @mock.patch.object(dbapi, 'network_devices_create')
    def test_create_network_devices_with_valid_data(self, mock_devices):
        mock_devices.return_value = fake_resources.NETWORK_DEVICE1
        data = {'name': 'NewNetDevice1', 'region_id': 1, 'cloud_id': 1,
                'device_type': 'Sample', 'ip_address': '0.0.0.0'}
        resp = self.post('/v1/network-devices', data=data)
        self.assertEqual(201, resp.status_code)
        self.assertIn('Location', resp.headers)
        self.assertEqual(
            resp.headers['Location'],
            "http://localhost/v1/network-devices/1"
        )

    @mock.patch.object(dbapi, 'network_devices_create')
    def test_create_network_devices_returns_netdev_obj(self, mock_devices):
        mock_devices.return_value = fake_resources.NETWORK_DEVICE1
        data = {'name': 'NetDevices1', 'region_id': 1, 'cloud_id': 1,
                'device_type': 'Server', 'ip_address': '10.10.0.1',
                'variables': {"key1": "value1", "key2": "value2"}}
        expected_result = copy.deepcopy(data)
        expected_result.update({
            'id': 1,
            'project_id': 1,
            'cell_id': None,
            'parent_id': None,
            'links': [{'href': 'http://localhost/v1/regions/1', 'rel': 'up'}],
        })
        resp = self.post('/v1/network-devices', data=data)

        self.assertEqual(201, resp.status_code)
        self.assertEqual(expected_result, resp.json)

    @mock.patch.object(dbapi, 'network_devices_create')
    def test_create_netdevices_with_invalid_data(self, mock_devices):
        mock_devices.return_value = None
        # data is missing entry
        data = {'name': 'Sample'}
        resp = self.post('/v1/network-devices', data=data)
        self.assertEqual(400, resp.status_code)

    @mock.patch.object(dbapi, 'network_devices_create')
    def test_create_netdevices_with_invalid_property(self, mock_devices):
        data = {'name': 'NewNetDevice1', 'region_id': 1,
                'device_type': 'Sample', 'ip_address': '0.0.0.0',
                'foo': 'isinvalid'}
        resp = self.post('/v1/network-devices', data=data)
        self.assertEqual(400, resp.status_code)
        mock_devices.assert_not_called()


class APIV1NetworkDevicesLabelsTest(APIV1Test):
    # CRUD coverage for the /v1/network-devices/<id>/labels endpoint.

    @mock.patch.object(dbapi, 'network_devices_labels_update')
    def test_network_devices_labels_update(self, mock_devices):
        payload = {"labels": ["a", "b"]}
        mock_devices.return_value = fake_resources.NETWORK_DEVICE1
        resp = self.put('v1/network-devices/1/labels', data=payload)
        self.assertEqual(200, resp.status_code)
        self.assertEqual(resp.json, payload)

    @mock.patch.object(dbapi, 'network_devices_labels_update')
    def test_network_devices_labels_update_invalid_property(self, fake_device):
        payload = {"foo": "isinvalid"}
        resp = self.put('v1/network-devices/1/labels', data=payload)
        self.assertEqual(400, resp.status_code)
        fake_device.assert_not_called()

    @mock.patch.object(dbapi, 'network_devices_labels_delete')
    def test_network_devices_delete_labels(self, mock_network_device):
        payload = {"labels": ["label1", "label2"]}
        db_data = payload.copy()
        resp = self.delete('v1/network-devices/1/labels', data=payload)
        self.assertEqual(resp.status_code, 204)
        mock_network_device.assert_called_once_with(mock.ANY, '1', db_data)

    @mock.patch.object(dbapi, 'network_devices_labels_delete')
    def test_network_devices_delete_labels_bad_data(self, mock_network_device):
        payload = ["label1", "label2"]
        resp = self.delete('v1/network-devices/1/labels', data=payload)
        self.assertEqual(resp.status_code, 400)
        mock_network_device.assert_not_called()


class APIV1NetworkDevicesVariablesTest(APIV1Test):
    # CRUD coverage for /v1/network-devices/<id>/variables.
    # NOTE(review): the source view is truncated mid-way through this class;
    # only the first, fully visible method is reproduced here.

    @mock.patch.object(dbapi, 'resource_get_by_id')
    def test_network_devices_get_variables(self, mock_network_device):
        mock_network_device.return_value = fake_resources.NETWORK_DEVICE1
        resp = self.get('v1/network-devices/1/variables')
        expected = {"variables": {"key1": "value1", "key2": "value2"}}
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.json, expected)
- mock_network_device.return_value = db_return_value - payload = {"a": "b"} - db_data = payload.copy() - resp = self.put('v1/network-devices/1/variables', data=payload) - self.assertEqual(resp.status_code, 200) - mock_network_device.assert_called_once_with( - mock.ANY, "network-devices", '1', db_data - ) - expected = { - "variables": {"key1": "value1", "key2": "value2", "a": "b"}, - } - self.assertDictEqual(expected, resp.json) - - @mock.patch.object(dbapi, 'variables_update_by_resource_id') - def test_network_devices_put_variables_bad_data(self, mock_network_device): - payload = ["a", "b"] - resp = self.put('v1/network-devices/1/variables', data=payload) - self.assertEqual(resp.status_code, 400) - mock_network_device.assert_not_called() - - @mock.patch.object(dbapi, 'variables_delete_by_resource_id') - def test_network_devices_delete_variables(self, mock_network_device): - payload = ["key1"] - db_data = payload.copy() - resp = self.delete('v1/network-devices/1/variables', data=payload) - self.assertEqual(resp.status_code, 204) - mock_network_device.assert_called_once_with( - mock.ANY, "network-devices", '1', db_data - ) - - @mock.patch.object(dbapi, 'variables_delete_by_resource_id') - def test_network_devices_delete_vars_bad_data(self, mock_network_device): - payload = {"a": "b"} - resp = self.delete('v1/network-devices/1/variables', data=payload) - self.assertEqual(resp.status_code, 400) - mock_network_device.assert_not_called() - - -class APIV1NetworkInterfacesTest(APIV1Test): - @mock.patch.object(dbapi, 'network_interfaces_get_all') - def test_get_netinterfaces_by_ip_address_filter(self, fake_interfaces): - device_id = 1 - ip_address = '10.10.0.1' - filters = {'device_id': device_id, 'ip_address': ip_address} - path_query = ( - '/v1/network-interfaces?device_id={}&ip_address={}'.format( - device_id, ip_address - ) - ) - fake_interfaces.return_value = (fake_resources.NETWORK_INTERFACE_LIST1, - {}) - resp = self.get(path_query) - interface_resp = 
fake_resources.NETWORK_INTERFACE_LIST1 - self.assertEqual(len(resp.json['network_interfaces']), 1) - self.assertEqual(resp.json['network_interfaces'][0]["name"], - interface_resp[0].name) - - fake_interfaces.assert_called_once_with( - mock.ANY, filters, {'limit': 30, 'marker': None}, - ) - - @mock.patch.object(dbapi, 'network_interfaces_get_all') - def test_get_network_interfaces_by_device_id(self, fake_interfaces): - fake_interfaces.return_value = (fake_resources.NETWORK_INTERFACE_LIST1, - {}) - resp = self.get('/v1/network-interfaces?device_id=1') - network_interface_resp = fake_resources.NETWORK_INTERFACE1 - netifaces = resp.json['network_interfaces'] - self.assertEqual(netifaces[0]["name"], network_interface_resp.name) - self.assertEqual( - netifaces[0]['ip_address'], network_interface_resp.ip_address - ) - - @mock.patch.object(dbapi, 'network_interfaces_create') - def test_network_interfaces_create_with_valid_data(self, fake_interfaces): - fake_interfaces.return_value = fake_resources.NETWORK_INTERFACE1 - - data = {'name': 'NewNetInterface', 'device_id': 1, - 'ip_address': '10.10.0.1', 'interface_type': 'interface_type1'} - resp = self.post('/v1/network-interfaces', data=data) - self.assertEqual(201, resp.status_code) - self.assertEqual( - resp.json['ip_address'], data['ip_address'] - ) - self.assertIn("Location", resp.headers) - self.assertEqual( - resp.headers['Location'], - "http://localhost/v1/network-interfaces/1" - ) - - @mock.patch.object(dbapi, 'network_interfaces_create') - def test_network_interfaces_create_invalid_data(self, fake_interfaces): - fake_interfaces.return_value = fake_resources.NETWORK_INTERFACE1 - # data is missing entry - data = {'name': 'sample'} - resp = self.post('/v1/network-interfaces', data=data) - self.assertEqual(400, resp.status_code) - - @mock.patch.object(dbapi, 'network_interfaces_create') - def test_network_interfaces_create_invalid_property(self, fake_interfaces): - data = {'name': 'NewNetInterface', 'device_id': 1, - 
'ip_address': '0.0.0.0', 'interface_type': 'Sample', - 'foo': 'isinvalid'} - resp = self.post('/v1/network-interfaces', data=data) - self.assertEqual(400, resp.status_code) - fake_interfaces.assert_not_called() - - @mock.patch.object(dbapi, 'network_interfaces_get_all') - def test_get_network_interfaces(self, fake_interfaces): - fake_interfaces.return_value = (fake_resources.NETWORK_INTERFACE_LIST2, - {}) - resp = self.get('/v1/network-interfaces') - self.assertEqual(200, resp.status_code) - self.assertEqual(len(resp.json['network_interfaces']), 2) - fake_interfaces.assert_called_once_with( - mock.ANY, {}, {'limit': 30, 'marker': None}, - ) - - @mock.patch.object(dbapi, 'network_interfaces_get_all') - def test_get_network_interfaces_invalid_property(self, fake_interfaces): - resp = self.get('/v1/network-interfaces?foo=invalid') - self.assertEqual(400, resp.status_code) - fake_interfaces.assert_not_called() - - -class APIV1NetworkInterfacesIDTest(APIV1Test): - @mock.patch.object(dbapi, 'network_interfaces_get_by_id') - def test_get_network_interfaces_by_id(self, fake_interfaces): - fake_interfaces.return_value = fake_resources.NETWORK_INTERFACE1 - resp = self.get('/v1/network-interfaces/1') - self.assertEqual(resp.json["name"], - fake_resources.NETWORK_INTERFACE1.name) - self.assertEqual( - resp.json['ip_address'], - fake_resources.NETWORK_INTERFACE1.ip_address - ) - - @mock.patch.object(dbapi, 'network_interfaces_update') - def test_network_interfaces_update(self, fake_interfaces): - record = dict(fake_resources.NETWORK_INTERFACE1.items()) - payload = {'name': 'New'} - db_data = payload.copy() - record.update(payload) - fake_interfaces.return_value = record - - resp = self.put('/v1/network-interfaces/1', data=payload) - - self.assertEqual(resp.json['name'], db_data['name']) - self.assertEqual(200, resp.status_code) - self.assertEqual( - resp.json['ip_address'], - fake_resources.NETWORK_INTERFACE1.ip_address - ) - fake_interfaces.assert_called_once_with(mock.ANY, 
'1', db_data) - - @mock.patch.object(dbapi, 'network_interfaces_update') - def test_network_interfaces_update_invalid_property(self, fake_interfaces): - payload = {'foo': 'invalid'} - resp = self.put('/v1/network-interfaces/1', data=payload) - - self.assertEqual(400, resp.status_code) - fake_interfaces.assert_not_called() - - @mock.patch.object(dbapi, 'network_interfaces_delete') - def test_network_interfaces_delete(self, fake_interfaces): - resp = self.delete('/v1/network-interfaces/1') - self.assertEqual(204, resp.status_code) diff --git a/craton/tests/unit/test_api_routes.py b/craton/tests/unit/test_api_routes.py deleted file mode 100644 index 1816db1..0000000 --- a/craton/tests/unit/test_api_routes.py +++ /dev/null @@ -1,27 +0,0 @@ -from craton import api -from craton.tests import TestCase - - -class TestRouteURLNaming(TestCase): - pass - - -def generate_route_naming_functions(cls): - def gen_test(endpoint, url): - def test(self): - pattern = ( - "^/v1/([a-z-]+|)" - "(/(/[a-z-]+)?)?" 
- ) - self.assertRegex(url, pattern) - test_name = 'test_route_naming_{}'.format(endpoint) - setattr(cls, test_name, test) - - app = api.setup_app() - for rule in app.url_map.iter_rules(): - endpoint = rule.endpoint[3:] - url = rule.rule - gen_test(endpoint, url) - - -generate_route_naming_functions(TestRouteURLNaming) diff --git a/craton/tests/unit/test_api_schema.py b/craton/tests/unit/test_api_schema.py deleted file mode 100644 index 43ce2d0..0000000 --- a/craton/tests/unit/test_api_schema.py +++ /dev/null @@ -1,207 +0,0 @@ -import jsonschema - -from craton import api -from craton.api.v1.schemas import filters, validators -from craton.tests import TestCase - - -VALIDATORS = { - "with_schema": [ - ('ansible_inventory', 'GET'), - ('cells', 'GET'), - ('cells', 'POST'), - ('cells_id', 'GET'), - ('cells_id', 'PUT'), - ('devices', 'GET'), - ('hosts', 'GET'), - ('hosts', 'POST'), - ('hosts_id', 'GET'), - ('hosts_id', 'PUT'), - ('hosts_labels', 'DELETE'), - ('hosts_labels', 'GET'), - ('hosts_labels', 'PUT'), - ('network_devices', 'GET'), - ('network_devices', 'POST'), - ('network_devices_id', 'GET'), - ('network_devices_id', 'PUT'), - ('network_devices_labels', 'GET'), - ('network_devices_labels', 'PUT'), - ('network_devices_labels', 'DELETE'), - ('network_interfaces', 'GET'), - ('network_interfaces', 'POST'), - ("network_interfaces_id", "GET"), - ('network_interfaces_id', 'PUT'), - ('networks', 'GET'), - ('networks', 'POST'), - ("networks_id", "GET"), - ('networks_id', 'PUT'), - ('projects', 'GET'), - ('projects', 'POST'), - ("projects_id", "GET"), - ('regions', 'GET'), - ('regions', 'POST'), - ("regions_id", "GET"), - ('regions_id', 'PUT'), - ('clouds', 'GET'), - ('clouds', 'POST'), - ("clouds_id", "GET"), - ('clouds_id', 'PUT'), - ('users', 'GET'), - ('users', 'POST'), - ("users_id", "GET"), - ('variables_with_resolve', 'DELETE'), - ('variables_with_resolve', 'GET'), - ('variables_with_resolve', 'PUT'), - ('variables_without_resolve', 'DELETE'), - 
('variables_without_resolve', 'GET'), - ('variables_without_resolve', 'PUT'), - ], - "without_schema": [ - ('cells_id', 'DELETE'), - ('hosts_id', 'DELETE'), - ('network_devices_id', 'DELETE'), - ("network_interfaces_id", "DELETE"), - ("networks_id", "DELETE"), - ("projects_id", "DELETE"), - ("users_id", "DELETE"), - ("regions_id", "DELETE"), - ("clouds_id", "DELETE"), - ] -} - - -class TestAPISchema(TestCase): - """Confirm that valid schema are defined.""" - def test_all_validators_have_test(self): - known = set(VALIDATORS["with_schema"] + VALIDATORS["without_schema"]) - defined = set(validators.keys()) - self.assertSetEqual(known, defined) - - -def generate_schema_validation_functions(cls): - def gen_validator_schema_test(endpoint, method): - def test(self): - try: - loc_schema = validators[(endpoint, method)] - except KeyError: - self.fail( - 'The validator {} is missing from the schemas ' - 'validators object.'.format((endpoint, method)) - ) - - self.assertEqual(len(loc_schema), 1) - locations = { - 'GET': 'args', - 'DELETE': 'json', - 'PUT': 'json', - 'POST': 'json', - } - location, schema = loc_schema.popitem() - self.assertIn(method, locations) - self.assertEqual(locations[method], location) - self.assertIs( - jsonschema.Draft4Validator.check_schema(schema), None - ) - if 'type' not in schema or schema['type'] == 'object': - self.assertFalse(schema['additionalProperties']) - - name = '_'.join(('validator', endpoint, method)) - setattr(cls, 'test_valid_schema_{}'.format(name), test) - - for (endpoint, method) in VALIDATORS["with_schema"]: - gen_validator_schema_test(endpoint, method) - - def gen_no_validator_schema_test(endpoint, method): - def test(self): - try: - loc_schema = validators[(endpoint, method)] - except KeyError: - self.fail( - 'The validator {} is missing from the schemas ' - 'validators object.'.format((endpoint, method)) - ) - self.assertEqual({}, loc_schema) - name = '_'.join(('validator', endpoint, method)) - setattr(cls, 
'test_no_schema_{}'.format(name), test) - - for (endpoint, method) in VALIDATORS["without_schema"]: - gen_no_validator_schema_test(endpoint, method) - - def gen_filter_test(name, schema): - def test(self): - self.assertIs( - jsonschema.Draft4Validator.check_schema(schema), None - ) - if 'type' not in schema or schema['type'] == 'object': - self.assertFalse(schema['additionalProperties']) - setattr(cls, 'test_valid_schema_{}'.format(name), test) - - for (endpoint, method), responses in filters.items(): - for return_code, json in responses.items(): - if json['schema']: - name = '_'.join(('filter', endpoint, method, str(return_code))) - gen_filter_test(name, json['schema']) - - -generate_schema_validation_functions(TestAPISchema) - - -class TestSchemaLocationInRoute(TestCase): - def setUp(self): - super().setUp() - self.app = api.setup_app() - - -def generate_endpoint_method_validation_functions(cls): - def gen_test(test_type, endpoint, method): - def test(self): - rules = [ - rule for rule in self.app.url_map.iter_rules() - if rule.endpoint == endpoint and method in rule.methods - ] - self.assertEqual(len(rules), 1) - test_name = 'test_{}_endpoint_method_in_routes_{}_{}'.format( - test_type, endpoint, method - ) - setattr(cls, test_name, test) - - for (_endpoint, method) in validators: - endpoint = "v1.{}".format(_endpoint) - gen_test('validators', endpoint, method) - - for (_endpoint, method) in filters: - endpoint = "v1.{}".format(_endpoint) - gen_test('filters', endpoint, method) - - -generate_endpoint_method_validation_functions(TestSchemaLocationInRoute) - - -class TestRoutesInValidators(TestCase): - pass - - -def generate_route_validation_functions(cls): - def gen_test(test_type, checker, endpoint, method): - def test(self): - self.assertIn((endpoint, method), checker) - test_name = 'test_route_in_{}_{}_{}'.format( - test_type, endpoint, method - ) - setattr(cls, test_name, test) - - app = api.setup_app() - for rule in app.url_map.iter_rules(): - # remove 'v1.' 
from start of endpoint - endpoint = rule.endpoint[3:] - for method in rule.methods: - if method == 'OPTIONS': - continue - elif method == 'HEAD' and 'GET' in rule.methods: - continue - else: - gen_test('validators', validators, endpoint, method) - gen_test('filters', filters, endpoint, method) - - -generate_route_validation_functions(TestRoutesInValidators) diff --git a/craton/tests/unit/test_util.py b/craton/tests/unit/test_util.py deleted file mode 100644 index 1742bd5..0000000 --- a/craton/tests/unit/test_util.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Tests for craton.util module.""" -import uuid - -from craton import tests -from craton import util - - -class TestProjectIdUtilities(tests.TestCase): - """Unit tests for the copy_project_id_into_json function.""" - - def test_adds_project_id_to_json(self): - """Verify we add the project_id to the json body.""" - project_id = uuid.uuid4().hex - self.context.tenant = project_id - json = util.copy_project_id_into_json(self.context, {}) - self.assertDictEqual({'project_id': project_id}, json) - - def test_defaults_project_id_to_zero(self): - """Verify if there's no tenant attribute on the context we use 0.""" - del self.context.tenant - json = util.copy_project_id_into_json(self.context, {}) - self.assertDictEqual({'project_id': ''}, json) diff --git a/craton/util.py b/craton/util.py deleted file mode 100644 index 76052bf..0000000 --- a/craton/util.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Module containing generic utilies for Craton.""" -from datetime import date -from decorator import decorator -from flask import json, Response -import werkzeug.exceptions - -from oslo_log import log - -import craton.exceptions as exceptions - -LOG = log.getLogger(__name__) - - -def copy_project_id_into_json(context, json, project_id_key='project_id'): - """Copy the project_id from the context into the JSON request body. - - :param context: - The request context object. - :param json: - The parsed JSON request body. 
- :returns: - The JSON with the project-id from the headers added as the - "project_id" value in the JSON. - :rtype: - dict - """ - json[project_id_key] = getattr(context, 'tenant', '') - return json - - -class JSONEncoder(json.JSONEncoder): - - def default(self, o): - if isinstance(o, date): - return o.isoformat() - return json.JSONEncoder.default(self, o) - - -JSON_KWARGS = { - "indent": 2, - "sort_keys": True, - "cls": JSONEncoder, - "separators": (",", ": "), -} - - -def handle_all_exceptions(e): - """Generate error Flask response object from exception.""" - headers = [("Content-Type", "application/json")] - if isinstance(e, exceptions.Base): - message = e.message - status = e.code - elif isinstance(e, werkzeug.exceptions.HTTPException): - message = e.description - status = e.code - # Werkzeug exceptions can include additional headers, those should be - # kept unless the header is "Content-Type" which is set by this - # function. - headers.extend( - h for h in e.get_headers(None) if h[0].lower() != "content-type" - ) - else: - LOG.exception(e) - e_ = exceptions.UnknownException - message = e_.message - status = e_.code - - body = { - "message": message, - "status": status, - } - - body_ = "{}\n".format(json.dumps(body, **JSON_KWARGS)) - return Response(body_, status, headers) - - -@decorator -def handle_all_exceptions_decorator(fn, *args, **kwargs): - try: - return fn(*args, **kwargs) - except Exception as e: - return handle_all_exceptions(e) diff --git a/craton/workflow/README.rst b/craton/workflow/README.rst deleted file mode 100644 index e69de29..0000000 diff --git a/craton/workflow/__init__.py b/craton/workflow/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/craton/workflow/ansible.py b/craton/workflow/ansible.py deleted file mode 100644 index 37e8ab5..0000000 --- a/craton/workflow/ansible.py +++ /dev/null @@ -1,4 +0,0 @@ -"""Tasks for managing the execution of Ansible playbooks - -Takes in account failure. 
-""" diff --git a/craton/workflow/base.py b/craton/workflow/base.py deleted file mode 100644 index 2ca387f..0000000 --- a/craton/workflow/base.py +++ /dev/null @@ -1,11 +0,0 @@ -import abc - - -class WorkflowFactory(object, metaclass=abc.ABCMeta): - - @abc.abstractmethod - def workflow(self): - """Construct appropriate taskflow flow object. - - :returns: A flow.Flow subclass - """ diff --git a/craton/workflow/testflow.py b/craton/workflow/testflow.py deleted file mode 100644 index 09079c7..0000000 --- a/craton/workflow/testflow.py +++ /dev/null @@ -1,40 +0,0 @@ -import time - -from oslo_log import log as logging -from taskflow import task -from taskflow.patterns import linear_flow - -from craton.workflow import base - -LOG = logging.getLogger(__name__) - - -class Sleep(task.Task): - def __init__(self, delay=10, **kwargs): - super(Sleep, self).__init__(**kwargs) - self.delay = delay - - def execute(self): - LOG.info('Doing task %s', self) - time.sleep(self.delay) - - -class Fail(task.Task): - def execute(self): - LOG.info('Failing task %s', self) - raise RuntimeError('failure in task %s' % self) - - -class TestFlow(base.WorkflowFactory): - def __init__(self, task_delay=5): - super(TestFlow, self).__init__() - self.task_delay = task_delay - - def workflow(self): - f = linear_flow.Flow('example') - f.add( - Sleep(name='step 1', delay=self.task_delay), - Sleep(name='step 2', delay=self.task_delay), - Fail(name='step 3'), - ) - return f diff --git a/craton/workflow/worker.py b/craton/workflow/worker.py deleted file mode 100644 index 82e1c69..0000000 --- a/craton/workflow/worker.py +++ /dev/null @@ -1,74 +0,0 @@ -import contextlib -import threading - -from oslo_config import cfg -from oslo_log import log as logging -from oslo_utils import uuidutils -from taskflow.conductors import backends as conductors -from taskflow.jobs import backends as boards -from taskflow.persistence import backends as persistence_backends -from zake import fake_client - - -LOG = 
logging.getLogger(__name__) -CONF = cfg.CONF - -OPTS = [ - cfg.StrOpt('job_board_name', default='craton_jobs', - help='Name of job board used to store outstanding jobs.'), - cfg.IntOpt('max_simultaneous_jobs', default=9, - help='Number of tasks to run in parallel on this worker.'), -] -CONF.register_opts(OPTS) - -TASKFLOW_OPTS = [ - cfg.StrOpt('connection', default='memory', - help='Taskflow backend used for persisting taskstate.'), - cfg.StrOpt('job_board_url', - default='zookeeper://localhost?path=/taskflow/craton/jobs', - help='URL used to store outstanding jobs'), - cfg.BoolOpt('db_upgrade', default=True, - help='Upgrade DB schema on startup.'), -] -CONF.register_opts(TASKFLOW_OPTS, group='taskflow') - - -def _get_persistence_backend(conf): - return persistence_backends.fetch({ - 'connection': conf.taskflow.connection, - }) - - -def _get_jobboard_backend(conf, persistence=None): - client = None - if conf.taskflow.connection == 'memory': - client = fake_client.FakeClient() - return boards.fetch(conf.job_board_name, - {'board': conf.taskflow.job_board_url}, - client=client, persistence=persistence) - - -def start(conf): - persistence = _get_persistence_backend(conf) - - if conf.taskflow.db_upgrade: - with contextlib.closing(persistence.get_connection()) as conn: - LOG.info('Checking for database schema upgrade') - conn.upgrade() - - my_name = uuidutils.generate_uuid() - LOG.info('I am %s', my_name) - - board = _get_jobboard_backend(conf, persistence=persistence) - - conductor = conductors.fetch( - 'nonblocking', my_name, board, - engine='parallel', - max_simultaneous_jobs=conf.max_simultaneous_jobs, - persistence=persistence) - - board.connect() - LOG.debug('Starting taskflow conductor loop') - threading.Thread(target=conductor.run).start() - - return persistence, board, conductor diff --git a/doc/source/api-reference.rst b/doc/source/api-reference.rst deleted file mode 100644 index d9c9d8a..0000000 --- a/doc/source/api-reference.rst +++ /dev/null @@ -1,21 +0,0 
@@ -Craton's API Reference Guide -============================ -Resources: - -.. toctree:: - :maxdepth: 2 - - cells - devices - hosts - networks - net-devices - net-interfaces - regions - -API Usage: - -.. toctree:: - :maxdepth: 2 - - filtering-by-variables diff --git a/doc/source/arch-diagram.dot b/doc/source/arch-diagram.dot deleted file mode 100755 index c98e2b0..0000000 --- a/doc/source/arch-diagram.dot +++ /dev/null @@ -1,167 +0,0 @@ -digraph structs { - node [shape=plaintext] - -# overlap=false; -# splines=true; -# layout="neato"; - - Cli [label=< - - -
CLI
-
>]; - - PythonApi [label=< - - -
Python API
-
>]; - - - CratonCore [label=< - - - - - - -
- - - - - - - -
- Horizon UI
- Inventory,
Workflow Panels
-
Keystone
- Principals, roles,
privileges,
catalog endpoints
-
Barbican
- Key Storage for
TaskFlow Workers
-
-
- - - - - - - - - - - - - - - - - - - - - - - - - -
RBACREST API Service (Flask)
Python Object Modeloslo.cache
Inventory FabricWorkflows
Virtualized
Variables
Default
Inventory
Model
TaskFlow
Controller
Variable
Plugin
(Stevedore)
SQL
Alchemy
Workflow
Plugin
(Stevedore)
-
- - - - - - - - - - - - - - -
REDIS
-
MySQL/Galera
-
TF
JobBoard
-
WA Log
Capture
-
TF
Worker
Pool
-
ZooKeeper
-
-
- ->]; - - - NovaPlugin [label=< - - -
Nova Plugin
- (Inventory)
-
>]; - - HistoryPlugin [label=< - - -
History Plugin
- (Inventory)
-
>]; - - AnsiblePlugin [label=< - - -
Ansible Plugin
- (Workflow)
-
>]; - - HistoricalData [label=< - - -
Historica lData -
>]; - - - Legend [label=< - - - - -
Legend -
Used For Scaling -
Future Work -
>]; - - - -//UndercloudIntegrations [pos="1,1"]; -#subgraph cluster1 { -# style=invis; -# Barbican; -# Horizon; -# Keystone; -# } - -ranksep=.25; -#size = "8,8"; -#{ rank = same; Horizon; CratonCore:PythonObjectModel; } - -#{ rank = same; UndercloudIntegrations; CratonCore; } -#Horizon -> Keystone [style=invis] -NovaPlugin -> Legend [style=invis]; -CratonCore:Barbican -> Legend [style=invis]; - - CratonCore:WaLogCapture -> HistoricalData:HistoricalData; - HistoryPlugin:HistoryPlugin -> HistoricalData:HistoricalData; - CratonCore:Horizon -> PythonApi:PythonApi [constraint=false]; - CratonCore:RBAC -> CratonCore:Keystone; - PythonApi:PythonApi -> CratonCore:RestApi; - Cli:Cli -> PythonApi:PythonApi; - CratonCore:VariablePlugin -> NovaPlugin:NovaPlugin; - CratonCore:VariablePlugin -> HistoryPlugin:HistoryPlugin; - CratonCore:WorkflowPlugin -> AnsiblePlugin:AnsiblePlugin; - CratonCore:OsloCache -> CratonCore:Redis [constraint=false]; - CratonCore:SqlAlchemy -> CratonCore:MySqlGalera; - -} diff --git a/doc/source/architecture.rst b/doc/source/architecture.rst deleted file mode 100755 index 5500d12..0000000 --- a/doc/source/architecture.rst +++ /dev/null @@ -1,75 +0,0 @@ -Architecture -============ - - -.. 
graphviz:: arch-diagram.dot - -CLI ---- -TODO: Add Documentation - -Python API ----------- -TODO: Add Documentation - -RBAC ----- -TODO: Add Documentation - -REST API Service (Flask) ------------------------- -TODO: Add Documentation - -Python Object Model -------------------- -TODO: Add Documentation - -oslo.cache ----------- -TODO: Add Documentation - -Inventory Fabric ----------------- -TODO: Add Documentation - -Workflows ---------- -TODO: Add Documentation - -Virtualized Variables ---------------------- -TODO: Add Documentation - -Default Inventory Mode ----------------------- -TODO: Add Documentation - -TaskFlow Controller -------------------- -TODO: Add Documentation - -Variable Plugin (Stevedore) ---------------------------- -TODO: Add Documentation - -SQL Alchemy ------------ -TODO: Add Documentation - -Workflow Plugin (Stevedore) ---------------------------- -TODO: Add Documentation - -Nova Plugin ------------ -TODO: Add Documentation - -History Plugin --------------- -TODO: Add Documentation - -Ansible Plugin --------------- -TODO: Add Documentation - - diff --git a/doc/source/cells.rst b/doc/source/cells.rst deleted file mode 100644 index cb5d1be..0000000 --- a/doc/source/cells.rst +++ /dev/null @@ -1,402 +0,0 @@ -.. 
_cells: - -===== -Cells -===== - -Definition of cell - -Create Cell -=========== -:POST: /v1/cells - -Create a new Cell - -Normal response codes: OK(201) - -Error response codes: invalid request(400), validation exception(405) - -Request -------- - -+------------+------+---------+-------------------------+ -| Name | In | Type | Description | -+============+======+=========+=========================+ -| name | body | string | Unique name of the cell | -+------------+------+---------+-------------------------+ -| region_id | body | integer | Unique ID of the region | -+------------+------+---------+-------------------------+ -| labels | body | string | User defined labels | -+------------+------+---------+-------------------------+ -| note | body | string | Note used for governance| -+------------+------+---------+-------------------------+ -| variables | body | object | User defined variables | -+------------+------+---------+-------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Cell Create -******************* - -.. 
code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/cells" \ - -d '{"name": "myCell", "region_id": 1}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+-----------+------+---------+-------------------------------+ -| Name | In | Type | Description | -+===========+======+=========+===============================+ -| cell | body | object | - id | -| | | | - name | -| | | | - region_id | -| | | | - labels | -| | | | - note | -| | | | - variables | -+-----------+------+---------+-------------------------------+ -| id | body | integer | Unique ID of the cell | -+-----------+------+---------+-------------------------------+ -| name | body | string | Unique name of the cell | -+-----------+------+---------+-------------------------------+ -| region_id | body | integer | Unique ID of the cell's region| -+-----------+------+---------+-------------------------------+ -| labels | body | string | User defined labels | -+-----------+------+---------+-------------------------------+ -| note | body | string | Note used for governance | -+-----------+------+---------+-------------------------------+ -| variables | body | object | User defined variables | -+-----------+------+---------+-------------------------------+ - -Example Cell Create -******************* - -.. 
code-block:: json - - { - "id": 1, - "name": "myCell", - "note": null, - "region_id": 1 - } - -List Cells -========== - -:GET: /v1/cells?region_id= - -Gets all Cells - -Normal response codes: OK(200) - -Error response codes: invalid request(400), cell not found(404), validation exception(405) - -Default response: unexpected error - -Request -------- - -+-----------+-------+--------+---------+----------------------------------+ -| Name | In | Type | Required| Description | -+===========+=======+========+=========+==================================+ -| region_id | query | string | Yes | ID of the region to get cells for| -+-----------+-------+--------+---------+----------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Cell List -***************** - -.. code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/cells?region_id=1" \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+------------+------+---------+-------------------------------+ -| Name | In | Type | Description | -+============+======+=========+===============================+ -| cells | body | array | Array of cell objects | -+------------+------+---------+-------------------------------+ -| id | body | integer | Unique ID of the cell | -+------------+------+---------+-------------------------------+ -| name | body | string | Unique name of the cell | -+------------+------+---------+-------------------------------+ -| region_id | body | integer | Unique ID of the cell's region| -+------------+------+---------+-------------------------------+ -| labels | body | string | User defined labels | -+------------+------+---------+-------------------------------+ -| note | body | string | Note used for governance | -+------------+------+---------+-------------------------------+ -| 
variables | body | object | User defined variables | -+------------+------+---------+-------------------------------+ - -Example Cell List -***************** - -.. code-block:: json - - [ - { - "id": 2, - "name": "cellJr", - "note": null, - "region_id": 1 - }, - { - "id": 1, - "name": "myCell", - "note": null, - "region_id": 1 - } - ] - -.. todo:: **Example Unexpected Error** - - ..literalinclude:: ./api_samples/errors/errors-unexpected-resp.json - :language: javascript - -Update Cells -============ - -:PUT: /v1/cells/{id} - -Update an existing cell - -Normal response codes: OK(200) - -Error response codes: invalid request(400), cell not found(404), validation exception(405) - -Request -------- - -+----------+------+---------+------------------------------------+ -| Name | In | Type | Description | -+==========+======+=========+====================================+ -| name | body | string | Unique name of the cell | -+----------+------+---------+------------------------------------+ -| labels | body | string | User defined labels | -+----------+------+---------+------------------------------------+ -| note | body | string | Note used for governance | -+----------+------+---------+------------------------------------+ - - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Cell Update -******************* - -.. 
code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/cells/1" \ - -XPUT \ - -d '{"name": "changedName"}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+----------+------+---------+-------------------------------+ -| Name | In | Type | Description | -+==========+======+=========+===============================+ -| cell | body | object | - id | -| | | | - name | -| | | | - region_id | -| | | | - labels | -| | | | - note | -| | | | - variables | -+----------+------+---------+-------------------------------+ -| id | body | integer | Unique ID of the cell | -+----------+------+---------+-------------------------------+ -| name | body | string | Unique name of the cell | -+----------+------+---------+-------------------------------+ -| region_id| body | integer | Unique ID of the cell's region| -+----------+------+---------+-------------------------------+ -| labels | body | string | User defined labels | -+----------+------+---------+-------------------------------+ -| note | body | string | Note used for governance | -+----------+------+---------+-------------------------------+ -| variables| body | object | User defined variables | -+----------+------+---------+-------------------------------+ - -Examples Cell Update -******************** - -.. 
code-block:: json - - { - "id": 1, - "name": "changedName", - "note": null, - "project_id": "717e9a21-6e2d-44e0-bc84-8398563bda06", - "region_id": 1 - } - -Update Cell Variables -===================== - -:PUT: /v1/cells/{id}/variables - -Update user defined variables for the cell - -Normal response codes: OK(200) - -Error response codes: invalid request(400), cell not found(404), validation exception(405) - -Request -------- - -+--------+------+---------+------------------------------------+ -| Name | In | Type | Description | -+========+======+=========+====================================+ -| key | body | string | Identifier | -+--------+------+---------+------------------------------------+ -| value | body | object | Data | -+--------+------+---------+------------------------------------+ -| id | path | integer | Unique ID of the cell to be updated| -+--------+------+---------+------------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Cell Update Variables -***************************** - -.. code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/cells/1/variables" \ - -XPUT \ - -d '{"newKey": "sampleKey"}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+--------+------+---------+-------------------------+ -| Name | In | Type | Description | -+========+======+=========+=========================+ -| key | body | string | Identifier | -+--------+------+---------+-------------------------+ -| value | body | object | Data | -+--------+------+---------+-------------------------+ - -Example Cell Update Variables -***************************** - -.. 
code-block:: json - - { - "variables": - { - "newKey": "sampleKey" - } - } - -Delete Cell -=========== - -:DELETE: /v1/cells/{id} - -Deletes an existing record of a Cell - -Normal response codes: no content(204) - -Error response codes: invalid request(400), cell not found(404) - -Request ------- - -+--------+------+---------+------------------------------------+ -| Name | In | Type | Description | -+========+======+=========+====================================+ -| id | path | integer | Unique ID of the cell to be deleted| -+--------+------+---------+------------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Response -------- - -No body content is returned on a successful DELETE - -Delete Cell Variables -===================== - -:DELETE: /v1/cells/{id}/variables - -Delete existing key/value variables for the cell - -Normal response codes: no content(204) - -Error response codes: invalid request(400), cell not found(404), validation exception(405) - -Request ------- - -+--------+------+---------+-------------------------+ -| Name | In | Type | Description | -+========+======+=========+=========================+ -| id | path | integer | Unique ID of the cell | -+--------+------+---------+-------------------------+ -| key | body | string | Identifier to be deleted| -+--------+------+---------+-------------------------+ -| value | body | object | Data to be deleted | -+--------+------+---------+-------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Response -------- - -No body content is returned on a successful DELETE diff --git a/doc/source/conf.py b/doc/source/conf.py deleted file mode 100755 index 7ee7ba7..0000000 --- a/doc/source/conf.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you 
may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import sys - -sys.path.insert(0, os.path.abspath('../..')) -on_read_the_docs = os.environ.get('READTHEDOCS') == 'True' - -# -- General configuration ---------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.graphviz', - 'sphinx.ext.todo' -] -if not on_read_the_docs: - extensions.append('oslosphinx') - -# autodoc generation is a bit aggressive and a nuisance when doing heavy -# text edit cycles. -# execute "export SPHINX_DEBUG=1" in your terminal to disable - -# The suffix of source filenames. -source_suffix = '.rst' - -# List of patterns (relative to the source directory) used to ignore matching -# files. -exclude_patterns = [ - '**/template.rst', -] - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'Craton' - -# All contributors should update with their employers (if applicable); -# for individuals working on Craton, we can add Craton Developers when -# that becomes relevant. -copyright = u'2016, Rackspace, Intel' - -# If true, '()' will be appended to :func: etc. cross-reference text. -add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -add_module_names = True - -# The name of the Pygments (syntax highlighting) style to use. 
-pygments_style = 'sphinx' - -# -- Options for HTML output -------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. Major themes that come with -# Sphinx are currently 'default' and 'sphinxdoc'. -# html_theme_path = ["."] -# html_theme = '_theme' -# html_static_path = ['static'] - -html_sidebars = { '**': ['globaltoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html'], } - -# Output file base name for HTML help builder. -htmlhelp_basename = '%sdoc' % project - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass -# [howto/manual]). -latex_documents = [ - ('index', - '%s.tex' % project, - u'%s Documentation' % project, - u'OpenStack Foundation', 'manual'), -] - -# If true, todo and todolist produce output. -todo_include_todos = True - -# Example configuration for intersphinx: refer to the Python standard library. -#intersphinx_mapping = {'http://docs.python.org/': None} diff --git a/doc/source/contributing.rst b/doc/source/contributing.rst deleted file mode 100644 index 1728a61..0000000 --- a/doc/source/contributing.rst +++ /dev/null @@ -1,4 +0,0 @@ -============ -Contributing -============ -.. include:: ../../CONTRIBUTING.rst diff --git a/doc/source/cratoncli.rst b/doc/source/cratoncli.rst deleted file mode 100644 index d7fbb54..0000000 --- a/doc/source/cratoncli.rst +++ /dev/null @@ -1,899 +0,0 @@ - -================================== -Craton service command-line client -================================== - -.. 
program:: craton - -Contents -^^^^^^^^ - -`craton usage`_ - -`craton optional arguments`_ - -`craton project-create`_ - -`craton project-delete`_ - -`craton project-list`_ - -`craton project-show`_ - -`craton project-update`_ - -`craton region-create`_ - -`craton region-delete`_ - -`craton region-list`_ - -`craton region-show`_ - -`craton region-update`_ - -`craton cell-create`_ - -`craton cell-delete`_ - -`craton cell-list`_ - -`craton cell-show`_ - -`craton cell-update`_ - -`craton device-create`_ - -`craton device-delete`_ - -`craton device-list`_ - -`craton device-show`_ - -`craton device-update`_ - -`craton host-create`_ - -`craton host-delete`_ - -`craton host-list`_ - -`craton host-show`_ - -`craton host-update`_ - -`craton user-list`_ - - -craton usage ------------- - -**Subcommands:** - -:program:`craton usage` - Show usages of craton client. - -:program:`craton project-create` - Create a new project. - -:program:`craton project-delete` - Delete a project. - -:program:`craton project-list` - List all projects. - -:program:`craton project-show` - Show detailed information about a project. - -:program:`craton project-update` - Update information about a project. - -:program:`craton region-create` - Create a new region. - -:program:`craton region-delete` - Delete a region. - -:program:`craton region-list` - List all regions. - -:program:`craton region-show` - Show detailed information about a region. - -:program:`craton region-update` - Update information about a region. - -:program:`craton cell-create` - Create a new cell. - -:program:`craton cell-delete` - Delete a cell. - -:program:`craton cell-list` - List all cells. - -:program:`craton cell-show` - Show detailed information about a cell. - -:program:`craton cell-update` - Update information about a cell. - -:program:`craton device-create` - Create a new device. - -:program:`craton device-delete` - Delete a device. - -:program:`craton device-list` - List all devices. 
- -:program:`craton device-show` - Show detailed information about a device. - -:program:`craton device-update` - Update information about a device. - -:program:`craton host-create` - Create a new host. - -:program:`craton host-delete` - Delete a host. - -:program:`craton host-list` - List all hosts. - -:program:`craton host-show` - Show detailed information about a host. - -:program:`craton host-update` - Update information about a host. - -:program:`craton user-list` - List the users of a project. - -:program:`craton help` - Display help about this program or one of its subcommands. - - -craton optional arguments -------------------------- - -.. option:: --version - - Show program's version number and exit. - -.. option:: -v, --verbose - - Print more verbose output. - -craton project-create ---------------------- - -.. program:: craton project-create - -Create a new project. - -:: - - usage: craton project-create [-n ] [-u ] - -**Optional arguments:** - -.. option:: -n , --name - - Name of the project. - -.. option:: -u , --uuid - - UUID of the project. - - -craton project-delete ---------------------- - -.. program:: craton project-delete - -Delete a project. - -:: - - usage: craton project-delete - -**Positional arguments:** - -.. option:: project - - UUID of the project. - - -craton project-list -------------------- - -.. program:: craton project-list - -List the projects. - -:: - - usage: craton project-list [--detail] [--limit ] - -**Optional arguments:** - -.. option:: --detail - - Show detailed information about the projects. - -.. option:: --limit - - Maximum number of projects to return per request, 0 for no limit. Default - is the maximum number used by the Craton API Service. - - -craton project-show -------------------- - -.. program:: craton project-show - -Show detailed information about a project. - -:: - - usage: craton project-show - -**Positional arguments:** - -.. option:: project - - UUID of the project. 
- - -craton project-update ---------------------- - -.. program:: craton project-update - -Update information about a project. - -:: - - usage: craton project-update [-n ] - -**Positional arguments:** - -.. option:: project - - UUID of the project. - -**Optional arguments:** - -.. option:: -n , --name - - New name for the project. - -craton region-create --------------------- - -.. program:: craton region-create - -Create a new region. - -:: - - usage: craton region-create [-n ] - [-u ] - [-p ] - [--note ] - -**Optional arguments:** - -.. option:: -n , --name - - Name of the region. - -.. option:: -u , --uuid - - UUID of the region. - -.. option:: -p , --project , --project_uuid - - UUID of the project that this region belongs to. - -.. option:: --note - - Note about the region. - - -craton region-delete --------------------- - -.. program:: craton region-delete - -Delete a region. - -:: - - usage: craton region-delete - -**Positional arguments:** - -.. option:: region - - UUID of the region. - -craton region-list ------------------- - -.. program:: craton region-list - -List the regions. - -:: - - usage: craton region-list [--detail] [--limit ] - [--sort-key ] [--sort-dir ] - [--fields [ ...]] - -**Optional arguments:** - -.. option:: --detail - - Show detailed information about the regions. - -.. option:: --limit - - Maximum number of regions to return per request, 0 for no limit. Default - is the maximum number used by the Craton API Service. - -.. option:: --sort-key - - Region field that will be used for sorting. - -.. option:: --sort-dir - - Sort direction: “asc” (the default) or “desc”. - -.. option:: --fields [ ...] - - One or more region fields. Only these fields will be fetched from the - server. Can not be used when ‘-- detail’ is specified. - -craton region-show ------------------- - -.. program:: craton region-show - -Show detailed information about a region. - -:: - - usage: craton region-show - -**Positional arguments:** - -.. 
option:: region - - UUID of the region. - -craton region-update --------------------- - -.. program:: craton region-update - -Update information about a region. - -:: - - usage: craton region-update [-n ] - -**Positional arguments:** - -.. option:: region - - UUID of the region. - -**Optional arguments:** - -.. option:: -n , --name - - New name for the region. - - -craton cell-create ------------------- - -.. program:: craton cell-create - -Create a new cell. - -:: - - usage: craton cell-create [-n ] - [-u ] - [-p ] - [-r ] - [--note ] - -**Optional arguments:** - -.. option:: -n , --name - - Name of the cell. - -.. option:: -u , --uuid - - UUID of the cell. - -.. option:: -p , --project , --project_uuid - - UUID of the project that this cell belongs to. - -.. option:: -r , --region , --region_uuid - - UUID of the region that this cell belongs to. - -.. option:: --note - - Note about the cell. - - -craton cell-delete ------------------- - -.. program:: craton cell-delete - -Delete a cell. - -:: - - usage: craton cell-delete - -**Positional arguments:** - -.. option:: cell - - UUID of the cell. - - -craton cell-list ----------------- - -.. program:: craton cell-list - -List the cells. - -:: - - usage: craton cell-list [--detail] [--limit ] - [--sort-key ] [--sort-dir ] - [--fields [ ...]] - [--region ] - -**Optional arguments:** - -.. option:: --detail - - Show detailed information about the cells. - -.. option:: -r , --region - - UUID of the region that contains the desired list of cells. - -.. option:: --limit - - Maximum number of cells to return per request, 0 for no limit. Default is - the maximum number used by the Craton API Service. - -.. option:: --sort-key - - Cell field that will be used for sorting. - -.. option:: --sort-dir - - Sort direction: “asc” (the default) or “desc”. - -.. option:: --fields [ ...] - - One or more cell fields. Only these fields will be fetched from the - server. Can not be used when ‘-- detail’ is specified. 
- - -craton cell-show ----------------- - -.. program:: craton cell-show - -Show detailed information about a cell. - -:: - - usage: craton cell-show - -**Positional arguments:** - -.. option:: cell - - UUID of the cell. - - -craton cell-update ------------------- - -.. program:: craton cell-update - -Update information about a cell. - -:: - - usage: craton cell-update [-n ] - -**Positional arguments:** - -.. option:: cell - - UUID of the cell. - -**Optional arguments:** - -.. option:: -n , --name - - New name for the cell. - - -craton device-create --------------------- - -.. program:: craton device-create - -Create a new device. - -:: - - usage: craton device-create [-n ] - [-t ] - [-a ] - [-u ] - [-p ] - [-r ] - [-c ] - [--note ] - -**Optional arguments:** - -.. option:: -n , --name - - Name of the device. - -.. option:: -t , --type - - Type of device. - -.. option:: -a , --active - - Active or inactive state for a device: ‘true’ or ‘false’. - -.. option:: -u , --uuid - - UUID of the device. - -.. option:: -p , --project , --project_uuid - - UUID of the project that this device belongs to. - -.. option:: -r , --region , --region_uuid - - UUID of the region that this device belongs to. - -.. option:: -c , --cell , --cell_uuid - - UUID of the cell that this device belongs to. - -.. option:: --note - - Note about the device. - - -craton device-delete --------------------- - -.. program:: craton device-delete - -Delete a device. - -:: - - usage: craton device-delete - -**Positional arguments:** - -.. option:: device - - UUID of the device. - - -craton device-list ------------------- - -.. program:: craton device-list - -List the devices. - -:: - - usage: craton device-list [--detail] [--limit ] - [--sort-key ] [--sort-dir ] - [--fields [ ...]] - [--cell ] - -**Optional arguments:** - -.. option:: -c , --cell - - UUID of the cell that contains the desired list of devices. - -.. option:: --detail - - Show detailed information about the device. - -.. 
option:: --limit - - Maximum number of devices to return per request, 0 for no limit. Default - is the maximum number used by the Craton API Service. - -.. option:: --sort-key - - Device field that will be used for sorting. - -.. option:: --sort-dir - - Sort direction: “asc” (the default) or “desc”. - -.. option:: --fields [ ...] - - One or more device fields. Only these fields will be fetched from the - server. Can not be used when ‘-- detail’ is specified. - - -craton device-show ------------------- - -.. program:: craton device-show - -Show detailed information about a device. - -:: - - usage: craton device-show - -**Positional arguments:** - -.. option:: device - - UUID of the device. - - -craton device-update --------------------- - -.. program:: craton device-update - -Update information about a device. - -:: - - usage: craton device-update [-n ] - -**Positional arguments:** - -.. option:: device - - UUID of the device. - -**Optional arguments:** - -.. option:: -n , --name - - New name for the device. - - -craton host-create ------------------- - -.. program:: craton host-create - -Create a new host. - -:: - - usage: craton host-create [-n ] - [-t ] - [-a ] - [-u ] - [-p ] - [-r ] - [-c ] - [--note ] - [--access_secret ] - [-i ] - -**Optional arguments:** - -.. option:: -n , --name - - Name of the host. - -.. option:: -t , --type - - Type of host. - -.. option:: -a , --active - - Active or inactive state for a host: ‘true’ or ‘false’. - -.. option:: -u , --uuid - - UUID of the host. - -.. option:: -p , --project , --project_uuid - - UUID of the project that this host belongs to. - -.. option:: -r , --region , --region_uuid - - UUID of the region that this host belongs to. - -.. option:: -c , --cell , --cell_uuid - - UUID of the cell that this host belongs to. - -.. option:: --note - - Note about the host. - -.. option:: --access_secret - - UUID of the access secret of the host. - -.. option:: -i , --ip_address - - IP Address type of the host. 
- - -craton host-delete ------------------- - -.. program:: craton host-delete - -Delete a host. - -:: - - usage: craton host-delete - -**Positional arguments:** - -.. option:: host - - UUID of the host. - - -craton host-list ----------------- - -.. program:: craton host-list - -List the hosts. - -:: - - usage: craton host-list [--detail] [--limit ] - [--sort-key ] [--sort-dir ] - [--fields [ ...]] - [--cell ] - -**Optional arguments:** - -.. option:: -c , --cell - - UUID of the cell that contains the desired list of hosts. - -.. option:: --detail - - Show detailed information about the host. - -.. option:: --limit - - Maximum number of hosts to return per request, 0 for no limit. Default is - the maximum number used by the Craton API Service. - -.. option:: --sort-key - - Host field that will be used for sorting. - -.. option:: --sort-dir - - Sort direction: “asc” (the default) or “desc”. - -.. option:: --fields [ ...] - - One or more host fields. Only these fields will be fetched from the - server. Can not be used when ‘-- detail’ is specified. - - -craton host-show ----------------- - -.. program:: craton host-show - -Show detailed information about a host. - -:: - - usage: craton host-show - -**Positional arguments:** - -.. option:: host - - UUID of the host. - - -craton host-update ------------------- - -.. program:: craton host-update - -Update information about a host. - -:: - - usage: craton host-update [-n ] - -**Positional arguments:** - -.. option:: host - - UUID of the host. - -**Optional arguments:** - -.. option:: -n , --name - - New name for the host. - - -craton user-list ----------------- - -.. program:: craton user-list - -List the users in a project. - -:: - - usage: craton user-list [--detail] [--limit ] - [--sort-key ] [--sort-dir ] - [--fields [ ...]] - -**Optional arguments:** - -.. option:: --detail - - Show detailed information about the users. - -.. option:: --limit - - Maximum number of users to return per request, 0 for no limit. 
Default is - the maximum number used by the Craton API Service. - -.. option:: --sort-key - - User field that will be used for sorting. - -.. option:: --sort-dir - - Sort direction: “asc” (the default) or “desc”. - -.. option:: --fields [ ...] - - One or more user fields. Only these fields will be fetched from the - server. Can not be used when ‘-- detail’ is specified. diff --git a/doc/source/database.dot b/doc/source/database.dot deleted file mode 100755 index 5a7959e..0000000 --- a/doc/source/database.dot +++ /dev/null @@ -1,139 +0,0 @@ -digraph G -{ - size = "8,11"; - node [shape=plaintext] - - Project [label=< - - - - - - -
Project
+created_at : DateTime
+updated_at : DateTime
+id : Integer
+name : String
>]; - - Region [label=< - - - - - - - - -
Region
+created_at : DateTime
+updated_at : DateTime
+id : Integer
+project_id : Integer
+name : String
+note : String
>]; - - User [label=< - - - - - - - - - -
User
+created_at : DateTime
+updated_at : DateTime
+id : Integer
+project_id : Integer
+username : String
+is_admin : Boolean
+roles : JSONType
>]; - - Cell [label=< - - - - - - - - - -
Cell
+created_at : DateTime
+updated_at : DateTime
+id : Integer
+region_id : Integer
+project_id : Integer
+name : String
+note : String
>]; - - Device [label=< - - - - - - - - - - - - -
Device
+created_at : DateTime
+updated_at : DateTime
+id : Integer
+type : String
+name : String
+region_id : Integer
+project_id : Integer
+ip_address : IPAddressType
+active : Boolean
+note : String
>]; - - Label [label=< - - - - - - -
Label
+created_at : DateTime
+updated_at : DateTime
+id : Integer
+label : String
>]; - - Host [label=< - - - - - - - - - - - - - - -
Host
+created_at : DateTime
+updated_at : DateTime
+id : Integer
+type : String
+name : String
+region_id : Integer
+cell_id : Integer
+project_id : Integer
+ip_address : IPAddressType
+active : Boolean
+note : String
+access_secret_id : Integer
>]; - - AccessSecret [label=< - - - - - - -
AccessSecret
+created_at : DateTime
+updated_at : DateTime
+id : Integer
+cert : Text
>]; - - VariableCorrelation [label=< - - - - - - - - -
VariableCorrelation
+created_at : DateTime
+updated_at : DateTime
+id : Integer
+object_id : Integer
+object_type : String
+variable_id : Integer
>]; - - Variable [label=< - - - - - - -
Variable
+created_at : DateTime
+updated_at : DateTime
+id : Integer
+variable : String
>]; - - Project -> Region [dir=both headlabel=" *" fontsize="8" taillabel="1 "]; - Project -> User [dir=both headlabel="* " fontsize="8" taillabel="1 "]; - Project -> Cell [dir=both headlabel=" *" fontsize="8" taillabel="  1"]; - Region -> Cell [dir=both headlabel="* " fontsize="8" taillabel="1 "]; - Project -> Device [dir=both headlabel="* " fontsize="8" taillabel="1 "]; - Region -> Device [dir=both headlabel="* " fontsize="8" taillabel="1 "]; - Cell -> Device [dir=both headlabel="* " fontsize="8" taillabel="1 "]; - Label -> Device [dir=both headlabel="* " fontsize="8" taillabel="1 "]; - Device -> Host [dir=none]; - Host -> AccessSecret [dir=both headlabel="0..1 " fontsize="8" taillabel="1 "]; - VariableCorrelation -> Variable [dir=both headlabel="1 " fontsize="8" taillabel="1 "]; - VariableCorrelation -> Region [dir=both headlabel=" 0..1" fontsize="8" taillabel="* "]; - VariableCorrelation -> Cell [dir=both headlabel=" 0..1" fontsize="8" taillabel="* "]; - VariableCorrelation -> Device [dir=both headlabel=" 0..1" fontsize="8" taillabel="* "]; - VariableCorrelation -> Label [dir=both headlabel=" 0..1" fontsize="8" taillabel="* "]; - VariableCorrelation -> User [dir=both headlabel=" \n0..1" fontsize="8" taillabel="*"]; - VariableCorrelation -> Host [dir=both headlabel=" 0..1" fontsize="8" taillabel= "* "]; - - -} \ No newline at end of file diff --git a/doc/source/dev/install-keystone.rst b/doc/source/dev/install-keystone.rst deleted file mode 100644 index 2166581..0000000 --- a/doc/source/dev/install-keystone.rst +++ /dev/null @@ -1,52 +0,0 @@ -======================================================= -Setting up Craton with Keystone Using OpenStack-Ansible -======================================================= - -OpenStack-Ansible is an upstream project that uses Ansible to deploy and -configure production OpenStack from source. It also has the ability to deploy -an environment entirely on one machine like devstack. 
OpenStack-Ansible -(a.k.a., OSA) refers to these as AIOs (All In One). OSA's `Quick Start`_ -documentation describes how to build these. - -Once you have an AIO set-up, you need to create the Craton service user, add -the admin role to that user, set up the service and endpoints, and then you -need to do something a little unusual, depending on how you are developing -Craton. - -If you have OSA and craton on the same machine, then Craton should be able to -talk to what OSA calls its "Internal LB VIP". This is usually an IP address -that looks like ``172.29.236.100``. In this case, you should be fine to then -start using Craton with Keystone authentication (assuming you've also followed -the instructions for using Craton with Keystone). - -If you do not have them on the same machine, then Craton will not be able to -access the "Internal LB VIP" because (as its name might suggest) it is -internal to that AIO. In that case, you need to use the openstack client to -edit the Admin endpoint for Keystone itself. By default, the admin endpoint -will be something like: ``http://172.29.236.100:35357/v3``. Since we're -talking to Keystone from outside that AIO we need it to be the same as the -public endpoint which will look like ``https://<public-ip>:5000/v3``. To -update that, we need to do this: - -.. code-block:: bash - - export ADMIN_ENDPOINT_ID="$(openstack endpoint list --service identity \ - --interface admin \ - -c ID \ - -f value)" - export PUBLIC_URL="$(openstack endpoint list --service identity \ - --interface public \ - -c URL \ - -f value)" - openstack endpoint set --region RegionOne \ - --service identity \ - --url $PUBLIC_URL \ - --interface admin \ - $ADMIN_ENDPOINT_ID - -This ensures that ``keystonemiddleware`` will get the public IP address from -the service catalog when it needs to talk to the admin identity endpoint. - - -.. 
_Quick Start: - http://docs.openstack.org/developer/openstack-ansible/developer-docs/quickstart-aio.html diff --git a/doc/source/dev/install.rst b/doc/source/dev/install.rst deleted file mode 100755 index ef5dfbd..0000000 --- a/doc/source/dev/install.rst +++ /dev/null @@ -1,321 +0,0 @@ - -===================================================== -Installing and Setting up a Development Environment -===================================================== - -Installation -============ - -.. note:: *This is a Python3 project.* - -.. note:: *This project requires MySQL 5.7, until a stable release of MariaDB with JSON function support is available* - ---------------------- -Ubuntu 16.04 (Xenial) ---------------------- - - -* Install a fresh Ubuntu image - -* Make sure we have git installed:: - - # apt-get update - # apt-get install git -y - -* Clone the repository:: - - # git clone https://github.com/openstack/craton.git - -* Install the prerequisite packages:: - - # apt-get install python3.5 python3.5-dev - # apt-get install python3-pip python3-setuptools - # python3 -m pip install --upgrade pip setuptools - -* Goto craton directory and install the following:: - - # python3 -m pip install -r requirements.txt - # python3 -m pip install . 
- -* Install mysql-server and make sure mysql is running:: - - # apt-get install mysql-server-5.7 mysql-client-5.7 - # systemctl enable mysql - # systemctl start mysql - -* Ensure you have python3-mysqldb installed:: - - # apt-get install python3-mysqldb - --------- -CentOS 7 --------- - - -* Install a fresh CentOS 7 image - -* Make sure we have git installed:: - - # yum update - # yum install git -y - -* Clone the repository:: - - # git clone https://github.com/openstack/craton.git - -* Install the prerequisite packages:: - - # yum install python34-devel python34-pip python34-setuptools gcc - # python3 -m pip install --upgrade pip setuptools - -* Goto craton directory and install the following:: - - # python3 -m pip install -r requirements.txt - # python3 -m pip install . - -* Install mysql-server community release from `MySQL Community Page`_:: - - # wget https://dev.mysql.com/get/mysql57-community-release-el7-9.noarch.rpm - # rpm -ivh mysql57-community-release-el7-9.noarch.rpm - # yum install mysql-server - # systemctl enable mysqld - # systemctl start mysqld - -* Ensure you have MySQL-python installed:: - - # yum install MySQL-python - -* Setup Database User and secure installation:: - - # grep 'temporary password' /var/log/mysqld.log - # mysql_secure_installation - ---------- -Fedora 25 ---------- - - -* Install a fresh Fedora 25 image - -* Make sure we have git installed:: - - # dnf update - # dnf install git -y - -* Clone the repository:: - - # git clone https://github.com/openstack/craton.git - -* Install the prerequisite packages:: - - # dnf install python3-devel python3-pip python3-setuptools gcc redhat-rpm-config - # python3 -m pip install --upgrade pip setuptools - -* Goto craton directory and install the following:: - - # python3 -m pip install -r requirements.txt - # python3 -m pip install . 
- -* Install mysql-server and make sure mysql is running:: - - # dnf install mysql-server - # systemctl enable mysqld - # systemctl start mysqld - -* Ensure you have python3-mysql installed:: - - # dnf install python3-mysql - --------------- -Database Setup --------------- - -* Connect to database server as root user:: - - # mysql -u root -p - -* Create user craton:: - - # CREATE USER 'craton'@'localhost' IDENTIFIED BY 'craton'; - -* Grant proper access to the craton user and flush privileges:: - - # GRANT ALL PRIVILEGES ON craton.* TO 'craton'@'localhost' - identified by 'craton'; - # FLUSH PRIVILEGES; - -* You can verify that the user was added by calling:: - - # select host, user, password from mysql.user; - -* Create the Craton database:: - - # create database craton CHARACTER SET='utf8'; - -* Logout from the database server:: - - # exit - ------------------------------------- -Create etc/craton-api-conf.dev ------------------------------------- -* Copy the sample config in the etc directory to make a development config file. - - # cp craton-api-conf.sample craton-api-conf.dev - -* Make api_paste_config use a fully qualified path (not relative). - This will be specific for your machine - -.. 
note:: Make sure you have the proper path for craton-api-conf.dev - - # api_paste_config=/home/cratonuser/craton/etc/craton-api-paste.ini - -* Add the following line to the [database] section: - - # connection = mysql+pymysql://craton:craton@localhost/craton - -* Update the host in the [api] section to match your IP: - - # host = xxx.xxx.xxx.xxx - ----------- -Run dbsync ----------- - -* Make sure to run dbsync to get the db tables created:: - - # craton-dbsync --config-file=etc/craton-api-conf.dev version - # craton-dbsync --config-file=etc/craton-api-conf.dev upgrade - -* Make sure to run dbsync bootstrap to create initial project and root user:: - # craton-dbsync --config-file=etc/craton-api-conf.dev bootstrap - - Note: The above command outputs user, project-id and API key to use with - python-cratonclient to interact with craton server. - ---------------------- -Start the API Service ---------------------- - -* To start the API service, run the following command:: - - # craton-api --config-file=etc/craton-api-conf.dev - - -* Some examples of API calls are as below: - ---------------- -Create a Region ---------------- - -* In order to create the region, export the IP address you set in - /etc/craton-api-conf.dev:: - - # export MY_IP=xxx.xxx.xxx.xxx - -* Next create a cloud to which the region is associated to:: - - # curl -i "http://${MY_IP}:7780/v1/clouds" \ - -d '{"name": "Cloud_Sample"}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -* To create region, execute the following command:: - - # curl -i "http://${MY_IP}:7780/v1/regions" \ - -d '{"name": "DFW", "cloud_id": 1}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - ------------------- -Get created Region ------------------- - -* To get the created region, execute the following command:: - - # 
curl -i "http://${MY_IP}:7780/v1/regions" \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - --------------------------- -Get all hosts for Region 1 --------------------------- - -* To get all hosts for region 1, execute the following command:: - - # curl -i "http://${MY_IP}:7780/v1/hosts?region_id=1" \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - ---------------------- -Get a particular host ---------------------- - -* To get a particular host, execute the following command:: - - # curl -i "http://${MY_IP}:7780/v1/hosts/33" \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - ------------------------ -Using wrapper functions ------------------------ - -Some wrapper functions have been included in craton/tools to quickly build, reload, populate, and query craton. - -* To load the wrapper functions, run the following in the craton parent directory:: - - # source tools/wrapper-functions.sh - -* To start craton directly, run the following from the craton parent directory:: - - # craton-direct-start - -* The following environment variables must be exported for working with the craton API server:: - - * CRATON_URL - * OS_PROJECT_ID - * OS_USERNAME - * OS_PASSWORD - -* You can search the logs for these values or run:: - - # export eval $(craton-direct-env) - -* Populate craton with fake data by running:: - - # craton-fake-data - -* Run API calls against craton with the following wrappers:: -.. 
note:: *Requires the installation of httpie* - - # craton-post v1/regions name=HKG - # craton-get v1/hosts - # craton-put v1/hosts/3 device_type=container - # craton-put v1/hosts/3/variables foo=47 bar:='["a", "b", "c"]' - # craton-delete v1/hosts/4 - -------------- -Running Tests -------------- - -* To run unit tests, execute the following command:: - - # tox - -* To run functional tests, execute the following command:: - - # tox -e functional - -.. _MySQL Community Page: - https://dev.mysql.com/downloads/repo/yum/ diff --git a/doc/source/docker-install.rst b/doc/source/docker-install.rst deleted file mode 100755 index 7774449..0000000 --- a/doc/source/docker-install.rst +++ /dev/null @@ -1,136 +0,0 @@ - -======================= -Installing using Docker -======================= - -Installation -============ - -------------------------------------- -Installing necessary packages: Ubuntu -------------------------------------- - - -* Make sure git is installed:: - - $ sudo apt-get update - $ sudo apt-get install git -y - -* Clone the Craton repository:: - - $ git clone https://github.com/openstack/craton.git - -* To install Docker, follow the instructions found here: - https://docs.docker.com/engine/installation/linux/ubuntulinux/ - - -------------------------------------------------- -Installing necessary packages: Fedora/CentOS etc.
-------------------------------------------------- - - -* Install a fresh Fedora/CentOS image - -* Make sure we have git installed:: - - $ sudo yum update - $ sudo yum install git -y - -* Clone the repository:: - - $ git clone https://github.com/openstack/craton.git - -* Follow the correct Docker install guide for your operating system:: - - Fedora: https://docs.docker.com/engine/installation/linux/fedora/ - CentOS: https://docs.docker.com/engine/installation/linux/centos/ - - ---------------------------- -Run the Craton Docker Image ---------------------------- - -* First, go to craton directory and build the Docker image:: - - $ sudo docker build --pull -t craton-api:latest . - -* And finally, run the docker image:: - - $ sudo docker run -t --name craton-api -d craton-api:latest - - -------------------- -Calling into Craton -------------------- - -* Let's get container Id:: - - $ ContainerId=$(docker ps | grep craton-api:latest | awk '{print $1}') - -* We need the container IP, so we can run an API call against Craton running in the container:: - - $ ContainerIP=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' ${ContainerId}) - -* Bootstrap credentials are generated at the top of the craton-api logs for initial authentication. 
You can manually copy the username, api key, and project id from the logs by running:: - - $ docker logs -f craton-api - - Or you can grep for them:: - - $ CRATON_PROJECT_ID=$(docker logs craton-api | grep "ProjectId:" | awk '{print $2}' | tr -d '\r') - $ CRATON_USERNAME=$(docker logs craton-api | grep "Username:" | awk '{print $2}' | tr -d '\r') - $ CRATON_API_KEY=$(docker logs craton-api | grep "APIKey:" | awk '{print $2}' | tr -d '\r') - -* To generate a sample data set, use the following command:: - - $ python tools/generate_fake_data.py --url http://${ContainerIP}:7780/v1 --user "$CRATON_USERNAME" --project "$CRATON_PROJECT_ID" --key "$CRATON_API_KEY" - -* Now you can run a curl command like the one below to query Craton:: - - $ curl -i "http://${ContainerIP}:7780/v1/hosts?region_id=1" -H "Content-Type: application/json" -H "X-Auth-Token: ${CRATON_API_KEY}" -H "X-Auth-User: ${CRATON_USERNAME}" -H "X-Auth-Project: ${CRATON_PROJECT_ID}" - ------------------------ -Using wrapper functions ------------------------ - -Some wrapper functions have been included in craton/tools to quickly build, reload, populate, and query craton. - -* To load the wrapper functions, run the following in the craton parent directory:: - - # source tools/wrapper-functions.sh - -* To quick start and populate craton in docker, run the following from the craton parent directory:: - - # craton-docker-start - -* In order to interact with craton, export the bootstrap credentials by running:: - - # export eval $(craton-docker-env) - -* Populate craton with fake data by running:: - - # craton-fake-data - -* Run API calls against craton with the following wrappers:: -..
note:: *Requires the installation of httpie* - - # craton-post v1/regions name=HKG - # craton-get v1/hosts - # craton-put v1/hosts/3 device_type=container - # craton-put v1/hosts/3/variables foo=47 bar:='["a", "b", "c"]' - # craton-delete v1/hosts/4 - -------------------- -Command Cheat-Sheet -------------------- - -* Get the Craton logs:: - - $ docker logs -f craton-api - -* Open mysql in the Craton container:: - - $ docker exec -it craton-api mysql -ucraton -pcraton craton - -* Get a bash shell from the Craton container:: - - $ docker exec -it craton-api bash # for a bash shell, etc diff --git a/doc/source/filtering-by-variables.rst b/doc/source/filtering-by-variables.rst deleted file mode 100644 index e71dd82..0000000 --- a/doc/source/filtering-by-variables.rst +++ /dev/null @@ -1,196 +0,0 @@ -.. _filtering-by-variables: - -================================ -Filtering Resources by Variables -================================ - -This describes how to use variable queries when listing resources. This feature -uses a subset of JSON Path supported by `MySQL 5.7`_. Most notably, we do not -support the :code:`doubleAsterisk` component. - -Supported Syntax -================ - -A variable query in our API consists of two main parts, separated by a colon -(:code:`:`): - -1. The JSON path -2. The JSON value - -You may supply as many of these as you like with each discrete query separated -by a comma (:code:`,`). For example, the following would all be valid queries -against the Craton API: - -.. code-block:: text - - GET /v1/hosts?vars=hardware_profiles.disks[*].manufacturer:"Seagate" - -and - -.. code-block:: text - - GET /v1/hosts?vars="os-information".release.version:"4.4.0",hardware.core_count:12 - - -Path -^^^^ - -The JSON Path expression is a series of path legs separated by a period ('.'). -Each path leg can consist of the following components: - -- A key, which can be either: - - - An `ECMAScript identifier`_, such as :code:`hardware_profiles` or - :code:`release`. 
- - - A JSON_ string, such as :code:`"hyphenated-key"` or - :code:`"this-is-a-json-string"` - -- A key and an array wildcard or specific index, like :code:`foo[*]`, - :code:`foo.bar[*].key`, or :code:`foo[3]` - -- A wildcard character (:code:`*`), to specify all keys at this hierarchical - level, e.g. : :code:`foo.*.baz` - - -Value -^^^^^ - -The value portion of the query can consist of the following JSON data types: - -- A JSON_ string, e.g. :code:`"this-is-a-json-string"` - -- A JSON_ boolean, i.e. :code:`true` or :code:`false` - -- A JSON_ null, i.e. :code:`null` - -- A JSON_ integer, e.g. :code:`42` - -- A JSON_ float, e.g. :code:`3.14` - -Putting it All Together -======================= - - -Example 1 -^^^^^^^^^ - -With this syntax, you can express powerful variable filters that allow for -searching through nested metadata on a resource. Here's a quick example to -illustrate the usefulness of this feature. Let's take some arbitrary hardware -data that's been stored for each of our hosts: - -.. code-block:: json - - { - "hardware_profiles": { - "disks": [ - { - "manufacturer": "Seagate", - "capacity_quantity": 2, - "capacity_unit": "TB" - }, - { - "manufacturer": "Western Digital", - "capacity_quantity": 3, - "capacity_unit": "TB" - } - ] - } - } - - -Now, let's say we want to find all of the hosts with a Seagate disk; one could -accomplish this with the following query: - -.. code:: text - - GET /v1/hosts?vars=hardware_profiles.disks[*].manufacturer:"Seagate" - - -Example 2 -^^^^^^^^^ - -As another example, let's say we're a root user for Craton (meaning we have -access across projects) - what if we wanted to get all hosts that are in, say, -any Region that is in some specific data center and the way we're representing -that on the Region resource(s) is: - -..
code-block:: json - - { - "datacenter_info": { - "id": 543, - "name": "DFW_DC_0" - } - } - -Because of how variables are inherited by child resources, we could query for -all of these hosts by simply querying like so: - -.. code-block:: text - - GET /v1/hosts?vars=datacenter_info.id:543 - - -Limitations and Schema Considerations -===================================== - -Known Limitations -^^^^^^^^^^^^^^^^^ - -- Because MySQL 5.7 does not support slicing arrays (:code:`foo[4:10]`, for - instance), we do not support them in Craton. - -- Although MySQL 5.7 does support the double-asterisk (:code:`prefix**suffix`) - in its syntax, we do not. This is due to how `jsonpath-rw`, the library we - use for parsing the API response, doesn't include the double-asterisk in its - JSON path flavor. - -- The first key in the path must be known, because it does not participate in - the JSON column search. It is a separate field altogether, really, but we - allow one to append it to the beginning for convenience in the syntax. - -- You cannot use a colon (:code:`:`) in your JSON path or JSON value, since - that is reserved for parsing the query itself. - -- You cannot use a comma (:code:`,`) in your JSON path or JSON value, since - that is reserved for parsing the query itself. - -- When no rows are in the Variables table, JSON Path validation does not occur - at the DB. - -Schema Considerations -^^^^^^^^^^^^^^^^^^^^^ - -We do not support wildcard values in the Value portion of the variables query. -Therefore, it's a good idea to parse and store your data in a more consistent -and normalized manner. For instance, take the output of a -:bash:`uname` command in Linux, we'll use -:code:`Linux development 4.4.0-66-generic #87-Ubuntu SMP Fri Mar 3 15:29:05 UTC 2017 x86_64 x86_64 x86_64 GNU/Linux` -as an example. One could parse this (or, preferably, use the variety of -:bash:`uname` flags that are available) and get several values from it. 
You may -want to store them in a variable on hosts as something like: - -.. code-block:: json - - { - "hardware": { - "architecture": "x86_64", - ... - }, - "os": { - "details": "Linux development 4.4.0-66-generic #87-Ubuntu SMP Fri Mar 3 15:29:05 UTC 2017 x86_64 x86_64 x86_64 GNU/Linux", - "distribution": "Ubuntu", - "kernel": { - "type": "Linux", - "version": "4.4.0-66-generic", - ... - } - } - } - - -.. _`MySQL 5.7`: https://dev.mysql.com/doc/refman/5.7/en/json-path-syntax.html -.. _`ECMAScript Identifier`: https://www.ecma-international.org/ecma-262/5.1/#sec-7.6 -.. _JSON: http://www.json.org/ diff --git a/doc/source/high-level-design.rst b/doc/source/high-level-design.rst deleted file mode 100755 index 79db56d..0000000 --- a/doc/source/high-level-design.rst +++ /dev/null @@ -1,157 +0,0 @@ -Inventory -========= - -Concepts --------- - -The fundamental unit of inventory in Craton is a **device**, which has -the following characteristics: - -Configurability - A device is configurable, either directly over SSH with tooling - like Ansible or indirectly via some controller. - - Configuration can be further divided as follows: - - * Version-controlled configuration, often captured in - playbooks. It is the responsibility of workflows to use such - configuration. - - * Config data stored in the inventory schema. - - In either case, the ultimate source may be manual, programmatic, - or a combination of the two. - -Addressability - A device has an IP address (specifically the control plane IP - address). - -Hierarchy - Devices are part of a hierarchy of regions and cells; a region in - turn is part of a project. - -Labels - Devices may be arbitrarily labeled. Such labels can describe - physical layout -- cabinet, networking, power, etc -- along with - logical usage -- compute service, etc. Labels are not in a - hierarchy. (But should they be?) - - Some systems like Kubernetes use a key/value scheme for - labels. 
This can be readily supported by the convention - ``key:value`` for the label name. - -A **host** is a concrete subclass of device, and corresponds to the -equivalent Ansible concept: a host is directly configurable by an -Ansible playbook over SSH. Hosts have associated SSH keys for such -access. - -**Principals** interact (read and/or write) with inventory about -devices. Governance for this interaction is mediated by RBAC and -logged with respect to **change records** (including the reason for -the change). Third party governance systems like OneOps can further -structure these interactions. There are two classes of principals: - -Workflows - Capture audits about inventory; which are in turn used for any - remediation. The pattern of usage is bottom-up. - - Workflows are pluggable. They can be refined to a number of - levels: Ansible, OpenStack Ansible, a specific workflow for - managing patch levels with OSA (TODO does that actually make - sense for OSA?). - - Note that a workflow can be run any number of times, against - different subsets of inventory. Example: migrate a specific - cabinet, as specified by a label. Think of this distinction as - being like an Ansible playbook vs its play. - - Because workflows also know about version-controlled config, - perhaps they can be used for queries as well. (TODO Ansible has - some limited ways of determining such variables; it's possible OSA - might develop this further as well in terms of role-based scheme.) - -Users - Configure and query inventory. The pattern of usage is top-down, - whether that's configuring a specific label or drilling down from - a given cell. - - Users also can run workflows. This capability implies that - workflows can be linked to roles; and that permissions include - being the ability to run workflows. - -Inventory interactions can be optionally logged. 
For example, if -inventory is configured to use MySQL with InnoDB as its backing store, -then all changes can be captured in the write-ahead log and reliably -streamed to Kafka for analysis. - -Associated with each region, cell, label, and device are -**variables**. Here are some aspects of variables: - -Description - Variables are used to describe a device with config, auditing - (possibly a subset of discovered facts), and other - information. However variables do not store logging, metric, or - monitoring information --- because of volume, such storage is best - done in a separate database, such as timeseries DB. - -Key/value pairs - Variables are made of up key/value pairs. - -Key - Keys are strings; they are additionally restricted to be valid - Python identifiers. We usually refer to these as **top-level - keys**, because values can be arbitrarily complex JSON values. - - Such keys, and their prefixes, also serve as roles in Craton's - implementation of RBAC. Keys are in a single namespace that does - not differentiate between config or audit variables. - -Value - Values are of JSON type, and can store arbitrary data as such, - including binary data via base64 encoding. Workflows define these - specifics. - -Scope resolution - Variables use hierarchical scope to resolve for a specific device, - using the following ordering: - - 1. Region - 2. Cell - 3. Label; if a device has multiple labels, the labels are sorted - alphanumerically - 4. Device - - Such resolution overrides at the lowest defined level, which - allows for variables to describe a device with the "broadest - possible brush". Overrides do not merge values, even if the value - has keys embedded in it. - - In general, config variables should be set at the highest - possible level; whereas audit data should be bottom up from - device. - -Metadata - Variables are also associated with the actor that wrote - them, along with a record of change, including a note describing - the change. 
- - It may be desirable to track other metadata about a variable: is - this intended for config, vs discovered from an audit? But note - this might be just a question of which actor wrote this variable: - was it a user? (Config.) Or was it a workflow? (Audit/remediation, - possibly further identified by workflow metadata.) - - -Implementation --------------- - -Craton's inventory is modeled using Python objects, which in turn has -a concrete reference implementation using SQLAlchemy: - -.. graphviz:: database.dot - - -TODO(jimbaker): implementation of the inventory concepts is a work in -progress, however, the above schema represents the current -implementation. Notably missing are principals, including workflows -and users, which will be added in the next phase of work. diff --git a/doc/source/hosts.rst b/doc/source/hosts.rst deleted file mode 100644 index 8783a05..0000000 --- a/doc/source/hosts.rst +++ /dev/null @@ -1,498 +0,0 @@ -.. _hosts: - -===== -Hosts -===== - -Definition of host - -Create Host -=========== - -:POST: /v1/hosts - -Create a new host - -Normal response codes: OK(201) - -Error response codes: invalid request(400), validation exception(405) - -Request -------- - -+------------+------+---------+-------------------------------+ -| Name | In | Type | Description | -+============+======+=========+===============================+ -| name | body | string | Unique name of the host | -+------------+------+---------+-------------------------------+ -| cell_id | body | integer | Unique ID of the host's cell | -+------------+------+---------+-------------------------------+ -| region_id | body | integer | Unique ID of the host's region| -+------------+------+---------+-------------------------------+ -| parent_id | body | integer | ID of the host's parent | -+------------+------+---------+-------------------------------+ -| ip_address | body | string | IP address of the host | -+------------+------+---------+-------------------------------+ -| device_type| 
body | string | Type of host | -+------------+------+---------+-------------------------------+ -| active | body | boolean | State of host | -+------------+------+---------+-------------------------------+ -| labels | body | string | User defined labels | -+------------+------+---------+-------------------------------+ -| note | body | string | Note used for governance | -+------------+------+---------+-------------------------------+ -| variables | body | object | User defined variables | -+------------+------+---------+-------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Host Create -******************* - -.. code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/hosts" \ - -d '{"name": "fooHost", "region_id": 1, "ip_address": "11.11.11.14", "device_type": "Phone"}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+------------+------+---------+-------------------------------+ -| Name | In | Type | Description | -+============+======+=========+===============================+ -| host | body | object | - id | -| | | | - name | -| | | | - cell_id | -| | | | - region_id | -| | | | - parent_id | -| | | | - ip_address | -| | | | - device_type | -| | | | - active | -| | | | - labels | -| | | | - note | -| | | | - variables | -+------------+------+---------+-------------------------------+ -| id | body | integer | Unique ID of the host | -+------------+------+---------+-------------------------------+ -| name | body | string | Unique name of the host | -+------------+------+---------+-------------------------------+ -| cell_id | body | integer | Unique ID of the host's cell | -+------------+------+---------+-------------------------------+ -| region_id | body | integer | Unique ID of the host's region| 
-+------------+------+---------+-------------------------------+ -| parent_id | body | integer | ID of the host's parent | -+------------+------+---------+-------------------------------+ -| ip_address | body | string | IP address of the host | -+------------+------+---------+-------------------------------+ -| device_type| body | string | Type of host | -+------------+------+---------+-------------------------------+ -| active | body | boolean | State of host | -+------------+------+---------+-------------------------------+ -| labels | body | string | User defined labels | -+------------+------+---------+-------------------------------+ -| note | body | string | Note used for governance | -+------------+------+---------+-------------------------------+ -| variables | body | object | User defined variables | -+------------+------+---------+-------------------------------+ - -Examples Host Create -******************** - -.. code-block:: json - - { - "active": true, - "cell_id": null, - "device_type": "Phone", - "id": 1, - "ip_address": "11.11.11.14", - "name": "fooHost", - "note": null, - "parent_id": null, - "region_id": 1 - } - -List Hosts -========== - -:GET: /v1/hosts?region_id= - -Gets all Host - -Normal response codes: OK(200) - -Error response codes: invalid request(400), host not found(404), validation exception(405) - -Default response: unexpected error - -Request -------- - -+------------+------+---------+---------+------------------------------+ -| Name | In | Type | Required| Description | -+============+======+=========+=========+==============================+ -| region_id | query| integer | Yes | ID of the region to get hosts| -+------------+------+---------+---------+------------------------------+ -| limit | query| integer | No | Number of host to return | -| | | | | Ranging from 1 - 10000 | -+------------+------+---------+---------+------------------------------+ -| name | query| string | No | Name of the host to get | 
-+------------+------+---------+---------+------------------------------+ -| cell_id | query| integer | No | Name of the cell to get | -+------------+------+---------+---------+------------------------------+ -| ip | query| string | No | IP address of the host to get| -+------------+------+---------+---------+------------------------------+ -| device_type| query| string | No | Type of host to get | -+------------+------+---------+---------+------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Examples Host List -****************** - -.. code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/hosts?region_id=1" \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+------------+------+---------+-------------------------------+ -| Name | In | Type | Description | -+============+======+=========+===============================+ -| hosts | body | array | array of host | -+------------+------+---------+-------------------------------+ -| id | body | integer | Unique ID of the host | -+------------+------+---------+-------------------------------+ -| name | body | string | Unique name of the host | -+------------+------+---------+-------------------------------+ -| cell_id | body | integer | Unique ID of the host's cell | -+------------+------+---------+-------------------------------+ -| region_id | body | integer | Unique ID of the host's region| -+------------+------+---------+-------------------------------+ -| parent_id | body | integer | ID of the host's parent | -+------------+------+---------+-------------------------------+ -| ip_address | body | string | IP address of the host | -+------------+------+---------+-------------------------------+ -| device_type| body | string | Type of host | 
-+------------+------+---------+-------------------------------+ -| active | body | boolean | State of host | -+------------+------+---------+-------------------------------+ -| labels | body | string | User defined labels | -+------------+------+---------+-------------------------------+ -| note | body | string | Note used for governance | -+------------+------+---------+-------------------------------+ -| variables | body | object | User defined variables | -+------------+------+---------+-------------------------------+ - -Examples Host List -****************** - -.. code-block:: json - - [ - { - "active": true, - "cell_id": null, - "device_type": "Computer", - "id": 2, - "ip_address": "12.12.12.15", - "name": "foo2Host", - "note": null, - "parent_id": null, - "region_id": 1 - }, - { - "active": true, - "cell_id": null, - "device_type": "Phone", - "id": 1, - "ip_address": "11.11.11.14", - "name": "fooHost", - "note": null, - "parent_id": null, - "region_id": 1 - }, - ] - -.. todo:: **Example Unexpected Error** - - ..literalinclude:: ./api_samples/errors/errors-unexpected-resp.json - :language: javascript - -Update Hosts -============ - -:PUT: /v1/hosts/{id} - -Update an existing host - -Normal response codes: OK(200) - -Error response codes: invalid request(400), host not found(404), validation exception(405) - -Request -------- - -+------------+------+---------+------------------------------------+ -| Name | In | Type | Description | -+============+======+=========+====================================+ -| name | body | string | Unique name of the host | -+------------+------+---------+------------------------------------+ -| cell_id | body | integer | Unique ID of the host's cell | -+------------+------+---------+------------------------------------+ -| region_id | body | integer | Unique ID of the host's region | -+------------+------+---------+------------------------------------+ -| parent_id | body | integer | ID of the host's parent | 
-+------------+------+---------+------------------------------------+ -| ip_address | body | string | IP address of the host | -+------------+------+---------+------------------------------------+ -| device_type| body | string | Type of host | -+------------+------+---------+------------------------------------+ -| active | body | boolean | State of host | -+------------+------+---------+------------------------------------+ -| labels | body | string | User defined labels | -+------------+------+---------+------------------------------------+ -| note | body | string | Note used for governance | -+------------+------+---------+------------------------------------+ -| variables | body | object | User defined variables | -+------------+------+---------+------------------------------------+ -| id | path | integer | Unique ID of the host to be updated| -+------------+------+---------+------------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Examples Host Update -******************** - -.. 
code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/hosts/2" \ - -XPUT \ - -d '{"name": "changedName"}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+------------+------+---------+-------------------------------+ -| Name | In | Type | Description | -+============+======+=========+===============================+ -| host | body | object | - id | -| | | | - name | -| | | | - cell_id | -| | | | - region_id | -| | | | - parent_id | -| | | | - ip_address | -| | | | - device_type | -| | | | - active | -| | | | - labels | -| | | | - note | -| | | | - variables | -+------------+------+---------+-------------------------------+ -| id | body | integer | Unique ID of the host | -+------------+------+---------+-------------------------------+ -| name | body | string | Unique name of the host | -+------------+------+---------+-------------------------------+ -| cell_id | body | integer | Unique ID of the host's cell | -+------------+------+---------+-------------------------------+ -| region_id | body | integer | Unique ID of the host's region| -+------------+------+---------+-------------------------------+ -| parent_id | body | integer | ID of the host's parent | -+------------+------+---------+-------------------------------+ -| ip_address | body | string | IP address of the host | -+------------+------+---------+-------------------------------+ -| device_type| body | string | Type of host | -+------------+------+---------+-------------------------------+ -| active | body | boolean | State of host | -+------------+------+---------+-------------------------------+ -| labels | body | string | User defined labels | -+------------+------+---------+-------------------------------+ -| note | body | string | Note used for governance | -+------------+------+---------+-------------------------------+ -| variables | body | object | User defined 
variables | -+------------+------+---------+-------------------------------+ - -Example Host Update -******************* - -.. code-block:: json - - { - "active": true, - "cell_id": null, - "device_type": "Computer", - "id": 2, - "ip_address": "12.12.12.15", - "name": "changedName", - "note": null, - "project_id": "717e9a21-6e2d-44e0-bc84-8398563bda06", - "region_id": 1 - } - -Update Host variables -===================== - -:PUT: /v1/hosts/{id}/variables - -Update user defined variables for the host - -Normal response codes: OK(200) - -Error response codes: invalid request(400), host not found(404), validation exception(405) - -Request -------- - -+--------+------+---------+------------------------------------+ -| Name | In | Type | Description | -+========+======+=========+====================================+ -| key | body | string | Identifier | -+--------+------+---------+------------------------------------+ -| value | body | object | Data | -+--------+------+---------+------------------------------------+ -| id | path | integer | Unique ID of the host to be updated| -+--------+------+---------+------------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Host Variables Update -***************************** - -.. 
code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/hosts/1/variables" \ - -XPUT \ - -d '{"newVar": "sample variable"}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+--------+------+---------+-------------------------+ -| Name | In | Type | Description | -+========+======+=========+=========================+ -| key | body | string | Identifier | -+--------+------+---------+-------------------------+ -| value | body | object | Data | -+--------+------+---------+-------------------------+ - -Example Host Variables Update -***************************** - -.. code-block:: json - - { - "variables": - { - "newVar": "sample variable" - } - } - -Delete Host -=========== - -:DELETE: /v1/hosts/{id} - -Deletes an existing record of a Host - -Normal response codes: no content(204) - -Error response codes: invalid request(400), host not found(404) - -Request -------- - -+--------+------+---------+------------------------------------+ -| Name | In | Type | Description | -+========+======+=========+====================================+ -| id | path | integer | Unique ID of the host to be deleted| -+--------+------+---------+------------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE - -Delete Host Variables -===================== - -:DELETE: /v1/hosts/{id}/variables - -Delete existing key/value variables for the Host - -Normal response codes: no content(204) - -Error response codes: invalid request(400), host not found(404) validation exception(405) - -Request -------- - -+--------+------+---------+-------------------------+ -| Name | In | Type | Description | -+========+======+=========+=========================+ -| id | path | integer | Unique ID of the host | 
-+--------+------+---------+-------------------------+ -| key | body | string | Identifier to be deleted| -+--------+------+---------+-------------------------+ -| value | body | object | Data to be deleted | -+--------+------+---------+-------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE diff --git a/doc/source/index.rst b/doc/source/index.rst deleted file mode 100755 index d13dd4f..0000000 --- a/doc/source/index.rst +++ /dev/null @@ -1,54 +0,0 @@ -.. craton documentation master file, created by - sphinx-quickstart on Tue Jul 9 22:26:36 2013. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to Craton's documentation! -================================== - -Craton is a new project planned for OpenStack inclusion. -Craton supports deploying and operating OpenStack clouds by providing -scalable fleet management: - -* Inventory of configurable physical devices/hosts (the fleet) -* Audit and remediation workflows against this inventory -* REST APIs, CLI, and Python client to manage - -Support for workflows, CLI, and the Python client is in progress. - - - -Getting Started -=============== -.. toctree:: - :maxdepth: 1 - - installation - keystone - cratoncli - usage - - -Developer Guide -=============== - -.. 
toctree:: - :maxdepth: 1 - - specs/index - contributing - architecture - api-reference - high-level-design - - -Indices and Tables -================== - -* :ref:`genindex` -* :ref:`search` - - -License -======= -Craton is licensed under the `Apache license `_ diff --git a/doc/source/installation.rst b/doc/source/installation.rst deleted file mode 100755 index ce22ba7..0000000 --- a/doc/source/installation.rst +++ /dev/null @@ -1,31 +0,0 @@ -============ -Installation -============ - -There are several ways that you can install Craton. If you're just -getting started, it is recommended that you start with a Docker install. - -Docker Install --------------- - -.. toctree:: - :maxdepth: 1 - - ./docker-install - -Basic Install ------------- - -(Optional) install virtualenv if desired:: - - $ mkvirtualenv craton - $ pip install -r /craton/requirements.txt - $ python setup.py install - -Setup Developer Environment ---------------------------- - -.. toctree:: - :maxdepth: 1 - - ./dev/install diff --git a/doc/source/keystone.rst b/doc/source/keystone.rst deleted file mode 100644 index 8ccf014..0000000 --- a/doc/source/keystone.rst +++ /dev/null @@ -1,72 +0,0 @@ -=========================== -Using Keystone for Identity -=========================== - -By default, Craton uses its own local authentication mechanism. It also -supports using Keystone for identity and authentication. - -Before you can proceed, you need to first create a user for Craton, e.g., - -.. code-block:: bash - - openstack user create --project service \ - --description 'Craton Service User' \ - --password-prompt \ - --enable \ - craton - -And then you must add the admin role to it: - -.. code-block:: bash - - openstack role add --user craton \ - --project service \ - admin - -And then you must create the service and endpoints: - -.. 
code-block:: bash - - openstack service create --description 'Craton Fleet Management' \ - --name 'craton' \ - --enable \ - fleet_management - for endpoint_type in "admin internal public" ; do - openstack endpoint create \ - fleet_management $endpoint_type http://:/v1 \ - --region RegionOne - done - -Then you need to select the ``keystone-auth`` pipeline and configure the usual -Keystone auth token middleware options in the Craton API config file, e.g., - -.. code-block:: ini - - [api] - # ... - paste_pipeline = keystone-auth - - [keystone_authtoken] - auth_uri = https://:5000 - auth_url = https://:35357/v3 - project_name = service - username = craton - password = aVery_Secure&Complex+Password - project_domain_id = default - user_domain_id = default - auth_type = password - -You may need to either not use ``https`` in your URL or set ``insecure = -True`` to avoid SSL errors. - -Now with an appropriate identity in Keystone, one can use either the python -craton client or another client that can retrieve tokens from Keystone. For -example, if you use the openstack client to grab a token, you can use curl to -talk to Craton: - -.. code-block:: bash - - export AUTH_TOKEN="$(openstack token issue -c id -f value)" - curl -i \ - -H"X-Auth-Token: $AUTH_TOKEN" \ - http://:/v1/hosts?region_id=1 diff --git a/doc/source/network-devices.rst b/doc/source/network-devices.rst deleted file mode 100644 index 7a066d3..0000000 --- a/doc/source/network-devices.rst +++ /dev/null @@ -1,544 +0,0 @@ -.. 
_network-devices: - -============== -Network Device -============== - -Definition of network device - -Create Network Device -===================== - -:POST: /v1/network-devices - -Create a new network device - -Normal response codes: created(201) - -Error response codes: invalid request(400), validation exception(405) - -Request -------- - -+-----------------+------+---------+-------------------------------------------------+ -| Name | In | Type | Description | -+=================+======+=========+=================================================+ -| created_at | body | string | Timestamp of network device creation | -+-----------------+------+---------+-------------------------------------------------+ -| updated_at | body | string | Timestamp of last network device update | -+-----------------+------+---------+-------------------------------------------------+ -| hostname | body | string | Name of the host of the device | -+-----------------+------+---------+-------------------------------------------------+ -| id | body | integer | Unique ID of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| cell_id | body | integer | Unique ID of the network device's cell | -+-----------------+------+---------+-------------------------------------------------+ -| region_id | body | integer | Unique ID of the network device's region | -+-----------------+------+---------+-------------------------------------------------+ -| parent_id | body | integer | ID of the network device's parent | -+-----------------+------+---------+-------------------------------------------------+ -| ip_address | body | string | IP address of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| device_type | body | string | Type of device | -+-----------------+------+---------+-------------------------------------------------+ -| model_name | body | string | Model name of the 
network device | -+-----------------+------+---------+-------------------------------------------------+ -| os_version | body | string | Operating system version of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| vlans | body | string | virtual local area networks of the device | -+-----------------+------+---------+-------------------------------------------------+ -| interface_id | body | integer | Unique ID of the interface of the device | -+-----------------+------+---------+-------------------------------------------------+ -| network_id | body | integer | Unique ID of the network of the device | -+-----------------+------+---------+-------------------------------------------------+ -| active | body | boolean | State of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| labels | body | string | User defined labels | -+-----------------+------+---------+-------------------------------------------------+ -| note | body | string | Note used for governance | -+-----------------+------+---------+-------------------------------------------------+ -| variables | body | object | User defined variables | -+-----------------+------+---------+-------------------------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Network Device Create -***************************** - -.. 
code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/network-devices" \ - -d '{"hostname": "fooHost", "region_id": 1, "ip_address": "1.1.1.4", "device_type": "NIC"}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+-----------------+------+---------+-------------------------------------------------+ -| Name | In | Type | Description | -+=================+======+=========+=================================================+ -| network-device | body | object | - created_at | -| | | | - updated_at | -| | | | - hostname | -| | | | - id | -| | | | - cell_id | -| | | | - region_id | -| | | | - parent_id | -| | | | - ip_address | -| | | | - device_type | -| | | | - model_name | -| | | | - os_version | -| | | | - vlans | -| | | | - interface_id | -| | | | - network_id | -| | | | - active | -| | | | - labels | -| | | | - note | -| | | | - variables | -+-----------------+------+---------+-------------------------------------------------+ -| created_at | body | string | Timestamp of network device creation | -+-----------------+------+---------+-------------------------------------------------+ -| updated_at | body | string | Timestamp of last network device update | -+-----------------+------+---------+-------------------------------------------------+ -| hostname | body | string | Name of the host of the device | -+-----------------+------+---------+-------------------------------------------------+ -| id | body | integer | Unique ID of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| cell_id | body | integer | Unique ID of the network device's cell | -+-----------------+------+---------+-------------------------------------------------+ -| region_id | body | integer | Unique ID of the network device's region | 
-+-----------------+------+---------+-------------------------------------------------+ -| parent_id | body | integer | ID of the network device's parent | -+-----------------+------+---------+-------------------------------------------------+ -| ip_address | body | string | IP address of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| device_type | body | string | Type of device | -+-----------------+------+---------+-------------------------------------------------+ -| model_name | body | string | Model name of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| os_version | body | string | Operating system version of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| vlans | body | string | virtual local area networks of the device | -+-----------------+------+---------+-------------------------------------------------+ -| interface_id | body | integer | Unique ID of the interface of the device | -+-----------------+------+---------+-------------------------------------------------+ -| network_id | body | integer | Unique ID of the network of the device | -+-----------------+------+---------+-------------------------------------------------+ -| active | body | boolean | State of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| labels | body | string | User defined labels | -+-----------------+------+---------+-------------------------------------------------+ -| note | body | string | Note used for governance | -+-----------------+------+---------+-------------------------------------------------+ -| variables | body | object | User defined variables | -+-----------------+------+---------+-------------------------------------------------+ - -Example Network Device Create -***************************** - -.. 
code-block:: json - - { - "cell_id": null, - "device_type": "NIC", - "id": 6, - "ip_address": "1.1.1.4", - "model_name": null, - "os_version": null, - "parent_id": null, - "project_id": "717e9a21-6e2d-44e0-bc84-8398563bda06", - "region_id": 1, - "vlans": null - } - -List Network Device -=================== - -:GET: /v1/network-devices?region_id= - -Gets all network devices in a region - -Normal response codes: OK(200) - -Error response codes: invalid request(400), device not found(404), validation exception(405) - -Default response: unexpected error - -Request -------- - -+------------+------+---------+---------+--------------------------------+ -| Name | In | Type | Required| Description | -+============+======+=========+=========+================================+ -| region_id | query| integer | Yes | ID of the region to get device | -+------------+------+---------+---------+--------------------------------+ -| id | query| integer | No | ID of the network device to get| -+------------+------+---------+---------+--------------------------------+ -| name | query| string | No | Name of the device to get | -+------------+------+---------+---------+--------------------------------+ -| cell_id | query| integer | No | Name of the cell to get | -+------------+------+---------+---------+--------------------------------+ -| ip_address | query| string | No | IP address of the host to get | -+------------+------+---------+---------+--------------------------------+ -| device_type| query| string | No | Type of host to get | -+------------+------+---------+---------+--------------------------------+ -| vars | query| string | No | Variable filters to get device | -+------------+------+---------+---------+--------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Network Device List -*************************** - -.. 
code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/network-devices?region_id=1" \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+-----------------+------+---------+-------------------------------------------------+ -| Name | In | Type | Description | -+=================+======+=========+=================================================+ -| network-device | body | array | array of network device | -+-----------------+------+---------+-------------------------------------------------+ -| created_at | body | string | Timestamp of network device creation | -+-----------------+------+---------+-------------------------------------------------+ -| updated_at | body | string | Timestamp of last network device update | -+-----------------+------+---------+-------------------------------------------------+ -| hostname | body | string | Name of the host of the device | -+-----------------+------+---------+-------------------------------------------------+ -| id | body | integer | Unique ID of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| cell_id | body | integer | Unique ID of the network device's cell | -+-----------------+------+---------+-------------------------------------------------+ -| region_id | body | integer | Unique ID of the network device's region | -+-----------------+------+---------+-------------------------------------------------+ -| parent_id | body | integer | ID of the network device's parent | -+-----------------+------+---------+-------------------------------------------------+ -| ip_address | body | string | IP address of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| device_type | body | string | Type of device | 
-+-----------------+------+---------+-------------------------------------------------+ -| model_name | body | string | Model name of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| os_version | body | string | Operating system version of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| vlans | body | string | virtual local area networks of the device | -+-----------------+------+---------+-------------------------------------------------+ -| interface_id | body | integer | Unique ID of the interface of the device | -+-----------------+------+---------+-------------------------------------------------+ -| network_id | body | integer | Unique ID of the network of the device | -+-----------------+------+---------+-------------------------------------------------+ -| active | body | boolean | State of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| labels | body | string | User defined labels | -+-----------------+------+---------+-------------------------------------------------+ -| note | body | string | Note used for governance | -+-----------------+------+---------+-------------------------------------------------+ -| variables | body | object | User defined variables | -+-----------------+------+---------+-------------------------------------------------+ - -Example Network Device List -*************************** - -.. 
code-block:: json - - [ - { - "cell_id": null, - "device_type": "NIC", - "id": 6, - "ip_address": "1.1.1.4", - "model_name": null, - "os_version": null, - "parent_id": null, - "project_id": "717e9a21-6e2d-44e0-bc84-8398563bda06", - "region_id": 1, - "vlans": null - }, - { - "cell_id": null, - "device_type": "Bridge", - "id": 8, - "ip_address": "1.1.1.8", - "model_name": null, - "os_version": null, - "parent_id": null, - "project_id": "717e9a21-6e2d-44e0-bc84-8398563bda06", - "region_id": 1, - "vlans": null - } - ] - -.. todo:: **Example Unexpected Error** - - .. literalinclude:: ./api_samples/errors/errors-unexpected-resp.json - :language: javascript - -Update Network Device -===================== - -:PUT: /v1/network-devices/{id} - -Update an existing network device - -Normal response codes: OK(200) - -Error response codes: invalid request(400), device not found(404), validation exception(405) - -Request ------- - -+-----------------+------+---------+-------------------------------------------------+ -| Name | In | Type | Description | -+=================+======+=========+=================================================+ -| hostname | body | string | Name of the host of the device | -+-----------------+------+---------+-------------------------------------------------+ -| ip_address | body | string | IP address of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| device_type | body | string | Type of device | -+-----------------+------+---------+-------------------------------------------------+ -| model_name | body | string | Model name of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| os_version | body | string | Operating system version of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| vlans | body | string | virtual local area networks of the device | 
-+-----------------+------+---------+-------------------------------------------------+ -| active | body | boolean | State of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| labels | body | string | User defined labels | -+-----------------+------+---------+-------------------------------------------------+ -| note | body | string | Note used for governance | -+-----------------+------+---------+-------------------------------------------------+ - -Example Network Device Update -***************************** - -.. code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/network-devices/6" \ - -XPUT \ - -d '{"hostname": "newHostName", "ip_address": "0.0.0.0"}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+-----------------+------+---------+-------------------------------------------------+ -| Name | In | Type | Description | -+=================+======+=========+=================================================+ -| created_at | body | string | Timestamp of network device creation | -+-----------------+------+---------+-------------------------------------------------+ -| updated_at | body | string | Timestamp of last network device update | -+-----------------+------+---------+-------------------------------------------------+ -| hostname | body | string | Name of the host of the device | -+-----------------+------+---------+-------------------------------------------------+ -| id | body | integer | Unique ID of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| cell_id | body | integer | Unique ID of the network device's cell | -+-----------------+------+---------+-------------------------------------------------+ -| region_id | body | integer | Unique ID of the network device's region | 
-+-----------------+------+---------+-------------------------------------------------+ -| parent_id | body | integer | ID of the network device's parent | -+-----------------+------+---------+-------------------------------------------------+ -| ip_address | body | string | IP address of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| device_type | body | string | Type of device | -+-----------------+------+---------+-------------------------------------------------+ -| model_name | body | string | Model name of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| os_version | body | string | Operating system version of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| vlans | body | string | virtual local area networks of the device | -+-----------------+------+---------+-------------------------------------------------+ -| interface_id | body | integer | Unique ID of the interface of the device | -+-----------------+------+---------+-------------------------------------------------+ -| network_id | body | integer | Unique ID of the network of the device | -+-----------------+------+---------+-------------------------------------------------+ -| active | body | boolean | State of the network device | -+-----------------+------+---------+-------------------------------------------------+ -| labels | body | string | User defined labels | -+-----------------+------+---------+-------------------------------------------------+ -| note | body | string | Note used for governance | -+-----------------+------+---------+-------------------------------------------------+ -| variables | body | object | User defined variables | -+-----------------+------+---------+-------------------------------------------------+ - -Example Network Device Update -***************************** - -.. 
code-block:: json - - { - "cell_id": null, - "device_type": "NIC", - "id": 6, - "ip_address": "0.0.0.0", - "model_name": null, - "os_version": null, - "parent_id": null, - "project_id": "717e9a21-6e2d-44e0-bc84-8398563bda06", - "region_id": 1, - "vlans": null - } - -Update Network Device Variables -=============================== - -:PUT: /v1/network-devices/{id}/variables - -Update user defined variables for the network device - -Normal response codes: OK(200) - -Error response codes: invalid request(400), device not found(404), validation exception(405) - -Request -------- - -+--------+------+---------+----------------------------------------------+ -| Name | In | Type | Description | -+========+======+=========+==============================================+ -| key | body | string | Identifier | -+--------+------+---------+----------------------------------------------+ -| value | body | object | Data | -+--------+------+---------+----------------------------------------------+ -| id | path | integer | Unique ID of the network device to be updated| -+--------+------+---------+----------------------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Network Device Variables Update -*************************************** - -.. 
code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/network-devices/6/variables" \ - -XPUT \ - -d '{"newVar": "sample variable"}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+--------+------+---------+-------------------------+ -| Name | In | Type | Description | -+========+======+=========+=========================+ -| key | body | string | Identifier | -+--------+------+---------+-------------------------+ -| value | body | object | Data | -+--------+------+---------+-------------------------+ - -Example Network Device Variables Update -*************************************** - -.. code-block:: json - - { - "variables": - { - "newVar": "sample variable" - } - } - -Delete Network Device -===================== - -:DELETE: /v1/network-devices/{id} - -Deletes an existing record of a network device - -Normal response codes: no content(204) - -Error response codes: invalid request(400), device not found(404) - -Request -------- - -+--------+------+---------+----------------------------------------------+ -| Name | In | Type | Description | -+========+======+=========+==============================================+ -| id | path | integer | Unique ID of the network device to be deleted| -+--------+------+---------+----------------------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE - -Delete Network Device Variables -=============================== - -:DELETE: /v1/network-devices/{id}/variables - -Delete existing key/value variables for the network device - -Normal response codes: no content(204) - -Error response codes: invalid request(400), device not found(404) validation exception(405) - -Request -------- - 
-+--------+------+---------+--------------------------------+ -| Name | In | Type | Description | -+========+======+=========+================================+ -| id | path | integer | Unique ID of the network device| -+--------+------+---------+--------------------------------+ -| key | body | string | Identifier to be deleted | -+--------+------+---------+--------------------------------+ -| value | body | object | Data to be deleted | -+--------+------+---------+--------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE diff --git a/doc/source/readme.rst b/doc/source/readme.rst deleted file mode 100644 index a6210d3..0000000 --- a/doc/source/readme.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../../README.rst diff --git a/doc/source/regions.rst b/doc/source/regions.rst deleted file mode 100644 index 062af0e..0000000 --- a/doc/source/regions.rst +++ /dev/null @@ -1,402 +0,0 @@ -.. 
_regions: - -======= -Regions -======= - -Definition of region - -Create Region -============= - -:POST: /v1/regions - -Creates a new Region - -Normal response codes: created(201) - -Error response codes: invalid request(400), validation exception(405) - -Request ------- - -+----------+------+---------+--------------------------+ -| Name | In | Type | Description | -+==========+======+=========+==========================+ -| name | body | string | Unique name of the region| -+----------+------+---------+--------------------------+ -| labels | body | string | User defined labels | -+----------+------+---------+--------------------------+ -| note | body | string | Note used for governance | -+----------+------+---------+--------------------------+ -| variables| body | object | User defined variables | -+----------+------+---------+--------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Region Create -********************* - -.. 
code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/regions" \ - -d '{"name": "DFW"}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+-----------+------+---------+--------------------------+ -| Name | In | Type | Description | -+===========+======+=========+==========================+ -| region | body | object | - id | -| | | | - name | -| | | | - cells | -| | | | - labels | -| | | | - note | -| | | | - variables | -+-----------+------+---------+--------------------------+ -| id | body | integer | Unique ID of the region | -+-----------+------+---------+--------------------------+ -| name | body | string | Unique name of the region| -+-----------+------+---------+--------------------------+ -| cells | body | array | Array of cells | -+-----------+------+---------+--------------------------+ -| labels | body | string | User defined labels | -+-----------+------+---------+--------------------------+ -| note | body | string | Note used for governance | -+-----------+------+---------+--------------------------+ -| variables | body | object | User defined variables | -+-----------+------+---------+--------------------------+ - -Example Region Create -********************* - -.. code-block:: json - - { - "id": 1, - "name": "DFW", - "note": null - } - -List Regions -============ - -:GET: /v1/regions - -Gets all Regions - -Normal response codes: OK(200) - -Error response codes: invalid request(400), validation exception(405) - -Default response: unexpected error - -Request -------- -No parameters - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Region List -******************* - -.. 
code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/regions" \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+-----------+------+---------+--------------------------+ -| Name | In | Type | Description | -+===========+======+=========+==========================+ -| regions | body | array | Array of regions | -+-----------+------+---------+--------------------------+ -| id | body | integer | Unique ID of the region | -+-----------+------+---------+--------------------------+ -| name | body | string | Unique name of the region| -+-----------+------+---------+--------------------------+ -| cells | body | array | Array of cells in region | -+-----------+------+---------+--------------------------+ -| labels | body | string | User defined labels | -+-----------+------+---------+--------------------------+ -| note | body | string | Note used for governance | -+-----------+------+---------+--------------------------+ -| variables | body | object | User defined variables | -+-----------+------+---------+--------------------------+ - -Example Region List -******************* - -.. code-block:: bash - - [ - { - "id": 1, - "name": "DFW", - "note": null - }, - { - "id": 2, - "name": "DFW2", - "note": null - }, - { - "id": 3, - "name": "fooRegion", - "note": null - } - ] - -.. 
todo:: **Example Unexpected Error** - - ..literalinclude:: ./api_samples/errors/errors-unexpected-resp.json - :language: javascript - -Update Region -============= - -:PUT: /v1/regions/{id} - -Update an existing region - -Normal response codes: OK(200) - -Error response codes: invalid request(400), region not found(404), validation exception(405) - -Request -------- - -+-----------+------+---------+--------------------------------------+ -| Name | In | Type | Description | -+===========+======+=========+======================================+ -| name | body | string | Unique name of the region | -+-----------+------+---------+--------------------------------------+ -| cells | body | array | Array of cells in region | -+-----------+------+---------+--------------------------------------+ -| labels | body | string | User defined labels | -+-----------+------+---------+--------------------------------------+ -| note | body | string | Note used for governance | -+-----------+------+---------+--------------------------------------+ -| id | path | integer | Unique ID of the region to be updated| -+-----------+------+---------+--------------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Region Update -********************* - -.. 
code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/regions/3" \ - -XPUT \ - -d '{"name": "DFW3"}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+-----------+------+---------+--------------------------+ -| Name | In | Type | Description | -+===========+======+=========+==========================+ -| region | body | object | - id | -| | | | - name | -| | | | - cells | -| | | | - labels | -| | | | - note | -| | | | - variables | -+-----------+------+---------+--------------------------+ -| id | body | integer | Unique ID of the region | -+-----------+------+---------+--------------------------+ -| name | body | string | Unique name of the region| -+-----------+------+---------+--------------------------+ -| cells | body | array | Array of cells in region | -+-----------+------+---------+--------------------------+ -| labels | body | string | User defined labels | -+-----------+------+---------+--------------------------+ -| note | body | string | Note used for governance | -+-----------+------+---------+--------------------------+ -| variables | body | object | User defined variables | -+-----------+------+---------+--------------------------+ - -Example Region Update -********************* - -.. 
code-block:: json - - { - "id": 3, - "name": "DFW3", - "note": null, - "project_id": "717e9a21-6e2d-44e0-bc84-8398563bda06" - } - -Update Region Variables -======================= - -:PUT: /v1/regions/{id}/variables - -Update user defined variables for the region - -Normal response codes: OK(200) - -Error response codes: invalid request(400), region not found(404), validation exception(405) - -Request -------- - -+----------+------+---------+--------------------------------------+ -| Name | In | Type | Description | -+==========+======+=========+======================================+ -| key | body | string | Identifier | -+----------+------+---------+--------------------------------------+ -| value | body | object | Data | -+----------+------+---------+--------------------------------------+ -| id | path | integer | Unique ID of the region to be updated| -+----------+------+---------+--------------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Example Region Variables Update -******************************* - -.. code-block:: bash - - curl -i "http://${MY_IP}:7780/v1/regions/3/variables" \ - -XPUT \ - -d '{“array”: [2]}' \ - -H "Content-Type: application/json" \ - -H "X-Auth-Token: demo" \ - -H "X-Auth-User: demo" \ - -H "X-Auth-Project: 717e9a216e2d44e0bc848398563bda06" - -Response --------- - -+--------+------+---------+-------------------------+ -| Name | In | Type | Description | -+========+======+=========+=========================+ -| key | body | string | Identifier | -+--------+------+---------+-------------------------+ -| value | body | object | Data | -+--------+------+---------+-------------------------+ - -Example Region Variables Update -******************************* - -.. 
code-block:: json - - { - "variables": - { - "string": "sample text", - "value": 24, - "array": [2] - } - } - -Delete Region -============= - -:DELETE: /v1/regions/{id} - -Deletes an existing record of a Region - -Normal response codes: no content(204) - -Error response codes: invalid request(400), region not found(404) - -Request -------- - -+------+------+---------+--------------------------------------+ -| Name | In | Type | Description | -+======+======+=========+======================================+ -| id | path | integer | Unique ID of the region to be deleted| -+------+------+---------+--------------------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: applicaton/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE - -Delete Region Variables -======================= - -:DELETE: /v1/regions/{id}/variables - -Delete existing key/value variables for the region - -Normal response codes: no content(204) - -Error response codes: invalid request(400), region not found(404) validation exception(405) - -Request -------- - -+-------+------+---------+-------------------------+ -| Name | In | Type | Description | -+=======+======+=========+=========================+ -| id | path | integer | Unique ID of the region | -+-------+------+---------+-------------------------+ -| key | body | string | Identifier to be deleted| -+-------+------+---------+-------------------------+ -| value | body | object | Data to be deleted | -+-------+------+---------+-------------------------+ - -Required Header -^^^^^^^^^^^^^^^ - -- Content-Type: application/json -- X-Auth-Token -- X-Auth-User -- X-Auth-Project - -Response --------- - -No body content is returned on a successful DELETE diff --git a/doc/source/specs/approved/template.rst b/doc/source/specs/approved/template.rst deleted file mode 120000 index dc9373e..0000000 --- a/doc/source/specs/approved/template.rst +++ /dev/null @@ 
-1 +0,0 @@ -../template.rst \ No newline at end of file diff --git a/doc/source/specs/implemented/.placeholder b/doc/source/specs/implemented/.placeholder deleted file mode 100644 index e69de29..0000000 diff --git a/doc/source/specs/implemented/list-devices.rst b/doc/source/specs/implemented/list-devices.rst deleted file mode 100644 index aaecdf9..0000000 --- a/doc/source/specs/implemented/list-devices.rst +++ /dev/null @@ -1,373 +0,0 @@ -.. - This work is licensed under a Creative Commons Attribution 3.0 Unported - License. - - http://creativecommons.org/licenses/by/3.0/legalcode - -=============== -Listing Devices -=============== - -https://blueprints.launchpad.net/craton/+spec/list-devices - -Craton has separate endpoints for different types of device. Devices of -different types can be linked in a parent-child relationship. Craton does not -offer a mechanism to easily display devices of different types making queries -tracking relationships cumbersome. - - -Problem description -=================== - -As an operator I want to be able to list a device's descendants so that I can -visualise or operate on a collection of related devices. - -Currently Craton supports two types of devices - hosts and network-devices. -Devices include the optional attribute parent_id to create a parent-child -relationship between two devices. So if one has two network-devices and one is -the parent of the other, the network-devices endpoint can be queried to find -the child device using the ID of the parent, e.g. - - GET /v1/network-devices?parent_id=1 - -If a third device is added, this time as a child device of the second device, -it is not possible to directly identify it from the root device. A second -query would need to be made using the ID of the second device, e.g. - - GET /v1/network-devices?parent_id=2 - -This means to represent a complete tree could potentially require a large -number of queries or the client would need to get all the devices and then link -them up itself. 
- -In addition, given that both a host and a network-device can have the same -parent, currently both endpoints need to be queried for any parent_id to get -all the devices. - -Proposed change -=============== - -To meet the needs of the user story and resolve the problems outlined above, -this spec proposes the introduction of a new endpoint for devices to allow for -the querying of devices as a whole. - -The endpoint will be /v1/devices and will support: - -- querying against a set of attributes common to all devices -- optionally including the descendants of any query - -Alternatives ------------- - -- the traversal of the tree could be left to the client, this would likely be a - slow process for large deployments -- the existing endpoints, i.e. /v1/hosts and /v1/network-devices, could be - allowed to return other types of device however this is likely to be -confusing and lead to mistakes uses the output. - -Data model impact ------------------ - -None - -REST API impact ---------------- - -Endpoint: /v1/devices -Method: GET -Description: List project devices -Normal response code: 200 -Expected error response codes: 400 - -Parameters schema: { - "type": "object", - "additionalProperties": False, - "properties": { - "id": { - "type": "integer", - }, - "region_id": { - "type": "integer", - }, - "cell_id": { - "type": "integer", - }, - "parent_id": { - "type": "integer", - }, - "active": { - "type": "boolean", - }, - "descendants": { - "default": False, - "type": "boolean", - }, - }, -} - -Response schema: { - "type": "object", - "additionalProperties": False, - "properties": { - "devices": { - "type": "object", - "additionalProperties": False, - "properties": { - "hosts": { - "type": "array", - "items": DefinitionsHost, - }, - "network-devices": { - "type": "array", - "items": DefinitionNetworkDeviceResponse, - }, - }, - }, - "links": DefinitionsPaginationLinks, - }, - }, -} - -Example: -Request - http://example.com/v1/devices -Response -{ - "devices": { - 
"hosts": [ - { - "active": true, - "cell_id": 4, - "created_at": "2017-02-16T14:28:55.000000", - "device_type": "server", - "id": 20, - "ip_address": "192.168.1.20", - "links": [ - { - "href": "http://example.com/v1/cells/4", - "rel": "up" - } - ], - "name": "host1.DFW.C0002.C-2.example2.com", - "note": null, - "parent_id": null, - "project_id": "b9f10eca-66ac-4c27-9c13-9d01e65f96b4", - "region_id": 2, - "updated_at": null - } - ... more hosts ..., - ], - "network-devices": [ - { - "access_secret_id": null, - "active": true, - "cell_id": 4, - "created_at": "2017-02-16T14:28:55.000000", - "device_type": "switch", - "id": 16, - "ip_address": "10.10.1.1", - "links": [ - { - "href": "http://example.com/v1/cells/4", - "rel": "up" - } - ], - "model_name": "model-x", - "name": "switch1.C0002.DFW.example.com", - "os_version": "version-1", - "parent_id": null, - "project_id": "b9f10eca-66ac-4c27-9c13-9d01e65f96b4", - "region_id": 2, - "updated_at": null, - "vlans": null - }, - ... more network-devices ..., - ], - }, - "links": [ - { - "href": "http://example.com/v1/devices?sort_dir=asc&limit=30&sort_keys=created_at%2Cid", - "rel": "first" - }, - { - "href": "http://example.com/v1/devices?sort_dir=asc&limit=30&sort_keys=created_at%2Cid", - "rel": "prev" - }, - { - "href": "http://example.com/v1/devices?sort_dir=asc&limit=30&sort_keys=created_at%2Cid", - "rel": "self" - }, - { - "href": "http://example.com/v1/devices?sort_dir=asc&limit=30&sort_keys=created_at%2Cid&marker=20", - "rel": "next" - } - ] -} - -Example: -Request - http://example.com/v1/devices?parent_id=16&descendants=true -Response -{ - "devices": { - "network-devices": [ - { - "access_secret_id": null, - "active": true, - "cell_id": 4, - "created_at": "2017-02-16T14:28:55.000000", - "device_type": "switch", - "id": 17, - "ip_address": "10.10.1.2", - "links": [ - { - "href": "http://example.com/v1/network-devices/16", - "rel": "up" - } - ], - "model_name": "model-x", - "name": "switch2.C0002.DFW.example.com", - 
"os_version": "version-1", - "parent_id": 16, - "project_id": "b9f10eca-66ac-4c27-9c13-9d01e65f96b4", - "region_id": 2, - "updated_at": null, - "vlans": null - }, - { - "access_secret_id": null, - "active": true, - "cell_id": 4, - "created_at": "2017-02-16T14:28:55.000000", - "device_type": "switch", - "id": 18, - "ip_address": "10.10.1.3", - "links": [ - { - "href": "http://example.com/v1/network-devices/17", - "rel": "up" - } - ], - "model_name": "model-x", - "name": "switch3.C0002.DFW.example.com", - "os_version": "version-1", - "parent_id": 17, - "project_id": "b9f10eca-66ac-4c27-9c13-9d01e65f96b4", - "region_id": 2, - "updated_at": null, - "vlans": null - }, - ], - "hosts": [ - { - "active": true, - "cell_id": 4, - "created_at": "2017-02-16T14:28:55.000000", - "device_type": "server", - "id": 200, - "ip_address": "192.168.1.20", - "links": [ - { - "href": "http://example.com/v1/network-devices/16", - "rel": "up" - } - ], - "name": "host10.DFW.C0002.C-2.example2.com", - "note": null, - "parent_id": 16, - "project_id": "b9f10eca-66ac-4c27-9c13-9d01e65f96b4", - "region_id": 2, - "updated_at": null - }, - ], - }, - "links": [ - { - "href": "http://example.com/v1/devices?parent_id=16&descendants=true&sort_dir=asc&limit=30&sort_keys=created_at%2Cid", - "rel": "first" - }, - { - "href": "http://example.com/v1/devices?parent_id=16&descendants=true&sort_dir=asc&limit=30&sort_keys=created_at%2Cid", - "rel": "prev" - }, - { - "href": "http://example.com/v1/devices?parent_id=16&descendants=true&sort_dir=asc&limit=30&sort_keys=created_at%2Cid", - "rel": "self" - }, - { - "href": "http://example.com/v1/devices?parent_id=16&descendants=true&sort_dir=asc&limit=30&sort_keys=created_at%2Cid&marker=20", - "rel": "next" - } - ] -} - -Security impact ---------------- - -None - -Notifications impact --------------------- - -None - -Other end user impact ---------------------- - -- /v1/devices with need to be supported by the client. 
- -Performance Impact ------------------- - -Given the nature of this new endpoint, there is a strong likelihood that it -will be used for most requests where listing devices is required, even if the -user is only after one type. - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -None - - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: -- git-harry - -Other contributors: -- None - -Work Items ----------- - -- add /v1/devices endpoint - -Dependencies -============ - -None - -Testing -======= - -A full set of functional and unit tests will need to be added. - -Documentation Impact -==================== - -The repo documentation will require updating but this is handled by the -project. - -References -========== - -None diff --git a/doc/source/specs/implemented/pagination-of-resources.rst b/doc/source/specs/implemented/pagination-of-resources.rst deleted file mode 100644 index 37f22a5..0000000 --- a/doc/source/specs/implemented/pagination-of-resources.rst +++ /dev/null @@ -1,336 +0,0 @@ -.. - This work is licensed under a Creative Commons Attribution 3.0 Unported - License. - - http://creativecommons.org/licenses/by/3.0/legalcode - -============================== - Pagination of List Resources -============================== - -https://blueprints.launchpad.net/craton/+spec/pagination-of-resources - -Craton is intended to manage large quantities of devices and other objects -without sacrificing performance. Craton needs to add pagination support in -order to efficiently handle queries on large collections. - - -Problem description -=================== - -In the current implementation, a request to one of our collection resources -will attempt to return all of the values that can be returned (based on -authentication, etc.). For example, if a user and project have access to 5000 -hosts then making a ``GET`` request against ``/v1/hosts`` would return all -5000. 
Such large result sets can and likely will slow down Craton's response -times and make it unusable. - - -Proposed change -=============== - -We propose adding pagination query parameters to all collection endpoints. The -new parameters would assume defaults if the user does not include them. - -We specifically propose that: - -#. Craton choose a default page size of 30 and limit it to being at least 10 - items and at most 100 items, - -#. Craton choose to make the next page both discoverable *and* calculable. In - other words, using "link" hypermedia relations in a response to indicate - first, previous, next, and last page URLs that are generated by the server - for the client, - -#. Craton should assume the defaults for requests that have no query - parameters. For example, if someone makes a ``GET`` request to - ``/v1/hosts`` it would imply an original page size of 30 and that the first - 30 results should be returned. - -To provide pagination to users, it is suggested that we use ``limit`` and -``marker`` parameters to indicate the page size and last seen ID. This allows -users to begin pagination after an item, rather than at a particular page. For -example, if a user is checking for new hosts in the listing and they know the -ID of the last host they encountered they can provide ``marker=:id&limit=30`` -to get the newer hosts. If instead, we used ``page`` and ``per_page`` there's -the possibility they'd miss items since hosts may have been deleted changing -the page number of the last host. - -This implies that the default ``limit`` value would be 30 and the default -``marker`` would be null (to indicate that no last ID is seen). - -This combination of parameters is practically the standard in OpenStack. -Operators familiar with OpenStack's existing Compute, Images, etc. APIs -will be familiar with these parameters. 
- -In addition to pagination parameters, this spec proposes adding link relations -in the Response body - as defined by JSON Hyper-Schema and `favored by the API -WG`_ - -This makes API usage easier for everyone, including, people using the API -directly and people writing API wrappers such as python-cratonclient. This -does, however, have the downside of affecting our response bodies and JSON -Schema - -Finally, I'd like to strongly propose that we include these links in each -response. Which relation types we include would depend on where in the -pagination the user is, but it would do something like this: - -#. Include a ``self`` relation for every page that tells the user exactly what - page they're presently on. - -#. If there is a page prior to the current one, we would include the ``prev`` - **and** ``first`` relations. These tell the user what the previous page is - and what the first page is. - -#. If there is a page after the current one, we would include the ``next`` - **and** ``last`` relations. These are the opposites to ``prev`` and - ``first`` respectively. - - It is worth noting that without properly implemented caching the ``last`` - relation, it could become computationally expensive to calculate for every - pagination query. - - -Alternatives ------------- - -Alternative query parameters to ``limit`` and ``marker`` are: - -#. Use ``page`` and ``per_page`` parameters to indicate the 1-indexed "page - number" and number of items on each page respectively. This means that - users can change how many items they get on each page request and can - resume in arbitrary places by specifying the ``page`` parameter. - - This would imply that the default ``page`` value would be 1 and the default - ``per_page`` would be 30. - - These two parameters are presently used by a significant number of large - APIs at the moment but are not common in OpenStack itself. 
They provide - simplicity in that if the API user wants to, they can just constantly - increment the page number to get the next page in the simplest way possible. - They don't have to calculate the next value from a combination of values in - the response of the last request. - - This does, however, prevent users from being able to resume iteration from - the last item it received in a list. Further, this adds the potential that - users may miss objects due to deletions or other changes in the - corresponding collection. Finally, these parameters only provide users an - opaque idea as to where in a paginated resource they are and how to resume - pagination. - -#. Use ``limit`` and ``offset`` parameters to provide similar functionality - and opacity to ``per_page`` and ``page`` respectively. - - The default ``limit`` would, again, be 30 and the default ``offset`` would - be 0. - - This combination of parameters is also present in a small number of - OpenStack projects but has some of the same negative implications as the - ``page`` and ``per_page`` parameters when compared to ``limit`` and - ``marker``. - -An alternative way to provide pagination links are: - -#. Link headers - as defined in :rfc:`6903` - using Relation Types defined in - :rfc:`5988`. - - These are also commonly used outside of OpenStack and were popular to the - creation of including the relations in the response body. The benefit to - Craton of using this method is that it doesn't effect our JSON Schema or - existing Response bodies. A major problem with this approach is that a - relation type can be repeated in a Link header. However, the HTTP library - used by the majority of the Python world - Requests - does not parse such - links correctly. Further, widespread support for parsing these header - values is not known to the author of this specification. - -Data model impact ------------------ - -This should have **no** impact on our data model. 
- -REST API impact ---------------- - -This specification will have two impacts on our REST API: - -#. It will add ``limit`` and ``marker`` query parameters that are identical to - a number of existing and future endpoints. - -#. It will change the fundamental structure of our list responses in order to - accommodate the link relations. - - At the moment, for example, a ``GET`` request made to ``/v1/hosts`` has a - response body that looks like: - - .. code-block:: json - - [ - { - "active": true, - "cell_id": null, - "device_type": "Computer", - "id": 1, - "ip_address": "12.12.12.15", - "name": "foo2Host", - "note": null, - "parent_id": null, - "region_id": 1 - }, - { - "active": true, - "cell_id": null, - "device_type": "Phone", - "id": 2, - "ip_address": "11.11.11.14", - "name": "fooHost", - "note": null, - "parent_id": null, - "region_id": 1 - } - ] - - This would need to transform to - - .. code-block:: json - - { - "items": [ - { - "active": true, - "cell_id": null, - "device_type": "Computer", - "id": 1, - "ip_address": "12.12.12.15", - "name": "foo2Host", - "note": null, - "parent_id": null, - "region_id": 1 - }, - { - "active": true, - "cell_id": null, - "device_type": "Phone", - "id": 2, - "ip_address": "11.11.11.14", - "name": "fooHost", - "note": null, - "parent_id": null, - "region_id": 1 - } - ], - "links": [ - { - "rel": "first", - "href": "https://craton.environment.com/v1/hosts?limit=30" - }, - { - "rel": "next", - "href": "https://craton.environment.com/v1/hosts?limit=30&marker=2" - }, - { - "rel": "self", - "href": "https://craton.environment.com/v1/hosts?limit=30&marker=1" - } - ] - } - - -Security impact ---------------- - -Pagination suppport reduces the potential attack surface for denial of service -attacks aimed at Craton. It alone, however, is not sufficient to prevent DoS -attacks and additional measures should be taken by deployers to further -mitigate those possibilities. 
- -Notifications impact --------------------- - -Craton does not yet have notifications. - -Other end user impact ---------------------- - -This will have a minor effect on python-cratonclient. The ``list`` calls it -implements will need to become smarter so they can handle pagination for the -user automatically. - -Performance Impact ------------------- - -There should not be any performance impact on the service created by this code -although it will frequently be called. - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -None - - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: -- icordasc - -Other contributors: -- None - -Work Items ----------- - -- Add basic pagination support with tests to ensure that functionality works - independent of the other features proposed in this specification - -- Add link relation support to response bodies - - -Dependencies -============ - -N/A - - -Testing -======= - -This should be tested on different levels, but at a minimum on a functional -level. - - -Documentation Impact -==================== - -This will impact our API reference documentation - - -References -========== - -* `IANA Link Relations Registry`_ - -* :rfc:`5988` - -* :rfc:`6903` - -* `JSON Hyper-Schema`_ - -* `"Pagination, Filtering, and Sorting" by the OpenStack API WG`_ - -.. _favored by the API WG: - http://specs.openstack.org/openstack/api-wg/guidelines/links.html -.. _IANA Link Relations Registry: - https://www.iana.org/assignments/link-relations/link-relations.xhtml -.. _JSON Hyper-Schema: - http://json-schema.org/latest/json-schema-hypermedia.html -.. 
_"Pagination, Filtering, and Sorting" by the OpenStack API WG: - http://specs.openstack.org/openstack/api-wg/guidelines/pagination_filter_sort.html diff --git a/doc/source/specs/implemented/url-structure-and-design.rst b/doc/source/specs/implemented/url-structure-and-design.rst deleted file mode 100644 index c012df4..0000000 --- a/doc/source/specs/implemented/url-structure-and-design.rst +++ /dev/null @@ -1,170 +0,0 @@ -.. - This work is licensed under a Creative Commons Attribution 3.0 Unported - License. - - http://creativecommons.org/licenses/by/3.0/legalcode - -================================= - Craton URL Structure and Design -================================= - -:Blueprint: https://blueprints.launchpad.net/craton/+spec/url-structure-and-design - -Craton developers decided to start modifying the URL structure and semantics -prior to creating a release. This has led down a number of paths which require -documentation and understanding prior to resolving ourselves on one such -structure and semantic meaning. - - -Problem description -=================== - -Presently, Craton's API requires query parameters for certain calls. For -example, - -- To list hosts, one **must** specify a region ID: - - .. code:: - - GET /v1/hosts?region_id=1 - -- To list cells, one **must** specify a region ID: - - .. code:: - - GET /v1/cells?region_id=1 - -To make the API easier to use for others, as well as easier to use when -performing checks across the inventory, Craton is looking to remove required -query parameters. - - -Proposed change -=============== - -Query parameters are typically optional and have always been a poorly considered -choice for a required parameter. Instead, the Craton team proposes that we -adopt a flat URL structure and design while continuing to allow filtering -based on attributes that were formerly required. - -Now users will be able to list all hosts and cells that their project allows -them to view: - -.. 
code:: - - GET /v1/hosts - GET /v1/cells - -While also allowing them to filter those based on variables and other -attributes: - -.. code:: - - GET /v1/hosts?vars=operating_system:ubuntu - GET /v1/cells?region_id=1 - -This change, however, will increase the priority of completing work around -adding pagination support to Craton. As such, adding support for pagination is -a work item of this specification. - -Alternatives ------------- - -We could retain our current way of using query parameters. This, however, is -unseemly, unusual, and an unpleasant experience for users. If we were to -continue requiring parameters, e.g., ``region_id``, we would instead be -adopting a different URL structure. - -Data model impact ------------------ - -There are no database or data model impacts implied by this change. - -REST API impact ---------------- - -This makes the API easier to use and reason about for users new to Craton's -API. - -Security impact ---------------- - -Proper pagination support is necessary to prevent requests returning large -collections of resources. - -Notifications impact --------------------- - -Craton does not presently have notifications, so there is no impact. - -Other end user impact ---------------------- - -This will affect the command-line interface to cratonclient. As region IDs are -no longer necessary for listing resources, that requirement will need to be -relaxed in our parameter handling. - -Performance Impact ------------------- - -With proper pagination, this should have a negligible (if any) impact on -Craton's performance. - -Other deployer impact ---------------------- - -This will not affect people who are deploying Craton. - -Developer impact ----------------- - -This has no other developer impact beyond API usage. 
- - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: -- git-harry - -Other contributors: -- icordasc - -Work Items ----------- - -- Refactor API layer to stop requiring parameters in the query string (See - also: https://review.openstack.org/408016) - -- Add pagination support for endpoints returning collections of resources. - - -Dependencies -============ - -N/A - - -Testing -======= - -We will update and continue to use our current functional testing. - - -Documentation Impact -==================== - -This will affect the API reference section of our documentation. - - -References -========== - -* https://review.openstack.org/408016 - -* https://review.openstack.org/400198 - -* https://review.openstack.org/401958 diff --git a/doc/source/specs/index.rst b/doc/source/specs/index.rst deleted file mode 100644 index d20152b..0000000 --- a/doc/source/specs/index.rst +++ /dev/null @@ -1,25 +0,0 @@ -========================================= - Fleet Management Service Specifications -========================================= - -All current approved Craton API specifications: - -.. toctree:: - :glob: - :maxdepth: 1 - - approved/* - - -All implemented Craton API specifications: - -.. toctree:: - :glob: - :maxdepth: 1 - - implemented/* - -Indices and Tables -================== - -* :ref:`search` diff --git a/doc/source/specs/template.rst b/doc/source/specs/template.rst deleted file mode 100644 index bf46b15..0000000 --- a/doc/source/specs/template.rst +++ /dev/null @@ -1,318 +0,0 @@ -.. - This work is licensed under a Creative Commons Attribution 3.0 Unported - License. - - http://creativecommons.org/licenses/by/3.0/legalcode - -========================================== -Example Spec - The title of your blueprint -========================================== - -Include the URL of your launchpad blueprint: - -https://blueprints.launchpad.net/craton/+spec/example - -Introduction paragraph -- why are we doing anything? 
A single paragraph of -prose that operators can understand. - -Some notes about using this template: - -* Your spec should be in ReSTructured text, like this template. - -* Please wrap text at 79 columns. - -* The filename in the git repository should match the launchpad URL, for - example a URL of: https://blueprints.launchpad.net/craton/+spec/awesome-thing - should be named awesome-thing.rst - -* Please do not delete any of the sections in this template. If you have - nothing to say for a whole section, just write: None - -* For help with syntax, see http://sphinx-doc.org/rest.html - -* To test out your formatting, build the docs using tox, or see: - https://www.siafoo.net/reST.xml - -* If you would like to provide a diagram with your spec, ascii diagrams are - required. http://asciiflow.com/ is a very nice tool to assist with making - ascii diagrams. The reason for this is that the tool used to review specs is - based purely on plain text. Plain text will allow review to proceed without - having to look at additional files which can not be viewed in gerrit. It - will also allow inline feedback on the diagram itself. - -* If your specification proposes any changes to the Craton REST API such - as changing parameters which can be returned or accepted, or even - the semantics of what happens when a client calls into the API, then - you should add the APIImpact flag to the commit message. Specifications with - the APIImpact flag can be found with the following query: - - https://review.openstack.org/#/q/status:open+project:openstack/craton+message:apiimpact,n,z - - -Problem description -=================== - -A detailed description of the problem: - -* For a new feature this might be use cases. Ensure you are clear about the - actors in each use case: End User vs Deployer - -* For a major reworking of something existing it would describe the - problems in that feature that are being addressed. 
- - -Proposed change -=============== - -Here is where you cover the change you propose to make in detail. How do you -propose to solve this problem? - -If this is one part of a larger effort make it clear where this piece ends. In -other words, what's the scope of this effort? - -Alternatives ------------- - -What other ways could we do this thing? Why aren't we using those? This doesn't -have to be a full literature review, but it should demonstrate that thought has -been put into why the proposed solution is an appropriate one. - -Data model impact ------------------ - -Changes which require modifications to the data model often have a wider impact -on the system. The community often has strong opinions on how the data model -should be evolved, from both a functional and performance perspective. It is -therefore important to capture and gain agreement as early as possible on any -proposed changes to the data model. - -Questions which need to be addressed by this section include: - -* What new data objects and/or database schema changes is this going to - require? - -* What database migrations will accompany this change? - -* How will the initial set of new data objects be generated? For example if you - need to take into account existing images, or modify other existing data, - describe how that will work. - -REST API impact ---------------- - -An /api directory is now included for REST API updates. 
Each API method which is either added or changed should have the following: - -* Specification for the method - - * A description of what the method does suitable for use in - user documentation - - * Method type (POST/PUT/GET/DELETE/PATCH) - - * Normal http response code(s) - - * Expected error http response code(s) - - * A description for each possible error code should be included - describing semantic errors which can cause it such as - inconsistent parameters supplied to the method, or when an - instance is not in an appropriate state for the request to - succeed. Errors caused by syntactic problems covered by the JSON - schema definition do not need to be included. - - * URL for the resource - - * Parameters which can be passed via the URL - - * JSON schema definition for the body data if allowed - - * JSON schema definition for the response data if any - -* Example use case including typical API samples for both data supplied - by the caller and the response - -* Discuss any policy changes, and discuss what things a deployer needs to - think about when defining their policy. - -Example JSON schema definitions can be found in the Craton tree -http://git.openstack.org/cgit/openstack/craton/api/v1/schemas.py - -Note that the schema should be defined as restrictively as -possible. Parameters which are required should be marked as such and -only under exceptional circumstances should additional parameters -which are not defined in the schema be permitted (i.e. -additionalProperties should be False). - -Reuse of existing predefined parameter types such as regexps for -passwords and user defined names is highly encouraged. - -Security impact ---------------- - -Describe any potential security impact on the system. Some of the items to -consider include: - -* Does this change touch sensitive data such as tokens, keys, or user data? 
- -* Does this change alter the API in a way that may impact security, such as - a new way to access sensitive information or a new way to login? - -* Does this change involve cryptography or hashing? - -* Does this change require the use of sudo or any elevated privileges? - -* Does this change involve using or parsing user-provided data? This could - be directly at the API level or indirectly such as changes to a cache layer. - -* Can this change enable a resource exhaustion attack, such as allowing a - single API interaction to consume significant server resources? Some examples - of this include launching subprocesses for each connection, or entity - expansion attacks in XML. - -For more detailed guidance, please see the OpenStack Security Guidelines as -a reference (https://wiki.openstack.org/wiki/Security/Guidelines). These -guidelines are a work in progress and are designed to help you identify -security best practices. For further information, feel free to reach out -to the OpenStack Security Group at openstack-security@lists.openstack.org. - -Notifications impact --------------------- - -Please specify any changes to notifications. This includes introduction of a -new notification, changes to an existing notification, or removing a -notification. - -Other end user impact ---------------------- - -Aside from the API, are there other ways a user will interact with this -feature? - -* Does this change have an impact on python-cratonclient? What does the user - interface there look like? - -Performance Impact ------------------- - -Describe any potential performance impact on the system. How often will new -code be called? Is there a major change to the calling pattern of existing -code? - -Examples of things to consider here include: - -* A small change in a utility function or a commonly used decorator can have a - large impact on performance. 
- -* Calls which result in database queries can have a profound impact on - performance when called in critical sections of the code. - -* Will the change include any locking, and if so what considerations are there - on holding the lock? - -Other deployer impact ---------------------- - -Discuss things that will affect how you deploy and configure OpenStack -that have not already been mentioned, such as: - -* Is this a change that takes immediate effect after its merged, or is it - something that has to be explicitly enabled? - -* If this change is a new binary, how would it be deployed? - -* Please state anything that those doing continuous deployment, or those - upgrading from the previous release, need to be aware of. Also describe - any plans to deprecate configuration values or features. For example, if we - change the directory name that widgets are stored in, how do we handle - widget directories created before the change landed? Do we move them? Do - we have a special case in the code? Do we assume that the operator will - recreate all the widgets in their cloud? - -Developer impact ----------------- - -Discuss things that will affect other developers working on OpenStack, -such as: - -* If the blueprint proposes a change to the store API, discussion of how - stores would implement the feature is required. - - -Implementation -============== - -Assignee(s) ------------ - -Who is leading the writing of the code? Or is this a blueprint where you're -throwing it out there to see who picks it up? - -If more than one person is working on the implementation, please designate the -primary author and contact. - -Primary assignee: -- - -Other contributors: -- - -Work Items ----------- - -Work items or tasks -- break the feature up into the things that need to be -done to implement it. Those parts might end up being done by different people, -but we're mostly trying to understand the timeline for implementation. 
- - -Dependencies -============ - -* Include specific references to specs and/or blueprints in craton, or in other - projects, that this one either depends on or is related to. - -* If this requires functionality of another project that is not currently used - by Craton: document that fact. - -* Does this feature require any new library dependencies or code otherwise not - included in OpenStack? Or does it depend on a specific version of library? - - -Testing -======= - -Please discuss how the change will be tested. We especially want to know what -tempest tests will be added. It is assumed that unit test coverage will be -added so that doesn't need to be mentioned explicitly, but discussion of why -you think unit tests are sufficient and we don't need to add more tempest -tests would need to be included. - -Is this untestable in gate given current limitations (specific hardware / -software configurations available)? If so, are there mitigation plans (3rd -party testing, gate enhancements, etc). - - -Documentation Impact -==================== - -What is the impact on the docs team of this change? Some changes might require -donating resources to the docs team to have the documentation updated. Don't -repeat details discussed above, but please reference them here. - - -References -========== - -Please add any useful references here. You are not required to have any -reference. Moreover, this specification should still make sense when your -references are unavailable. 
Examples of what you could include are: - -* Links to mailing list or IRC discussions - -* Links to notes from a summit session - -* Links to relevant research, if appropriate - -* Related specifications as appropriate (e.g., if it's an EC2 thing, link the - EC2 docs) - -* Anything else you feel it is worthwhile to refer to diff --git a/doc/source/usage.rst b/doc/source/usage.rst deleted file mode 100755 index b79a905..0000000 --- a/doc/source/usage.rst +++ /dev/null @@ -1,9 +0,0 @@ -======== -Usage -======== - -Import ------- -To use craton in a project:: - - import craton diff --git a/etc/craton-api-conf.sample b/etc/craton-api-conf.sample deleted file mode 100644 index cadcf21..0000000 --- a/etc/craton-api-conf.sample +++ /dev/null @@ -1,44 +0,0 @@ -[DEFAULT] -# Set Debug level -debug=True -# Log file for craton inventory -log_file=craton-api.log - -[api] -# Host IP to use for API service -host=0.0.0.0 -# Port to use for API service -port=7780 -# Paste Config file to use -api_paste_config=/craton/etc/craton-api-paste.ini -# The name of the Paste pipeline to use for Craton. -# -# Pipelines are organized according to authentication scheme. 
The available -# choices are: -# -# - ``local-auth`` (the default) Uses Craton's default authentication and -# authorization scheme -# - ``keystone-auth`` Uses Keystone for identity, authentication, and -# authorization -paste_pipeline = local-auth - - -# Keystone Middlewre settings -[keystone_authtoken] -#auth_uri = https://127.0.0.1:5000 -#auth_url = https://127.0.0.1:35357/v3 -#project_name = service -#username = craton -#password = -#project_domain_id = default -#user_domain_id = default -#auth_type = password - -[database] -backend = sqlalchemy -connection = mysql+pymysql://craton:craton@localhost/craton -use_db_reconnect = false -db_retry_interval = 1 -db_inc_retry_interval = true -db_max_retry_interval = 5 -db_max_retries = 5 diff --git a/etc/craton-api-paste.ini b/etc/craton-api-paste.ini deleted file mode 100644 index c041b0c..0000000 --- a/etc/craton-api-paste.ini +++ /dev/null @@ -1,24 +0,0 @@ -[pipeline:local-auth] -pipeline = request_id localauthcontext api_v1 - -[pipeline:keystone-auth] -pipeline = request_id authtoken keystonecontext api_v1 - -[app:api_v1] -paste.app_factory = craton.api:create_app - -[filter:noauthcontext] -paste.filter_factory = craton.api.middleware:NoAuthContextMiddleware.factory - -[filter:localauthcontext] -paste.filter_factory = craton.api.middleware:LocalAuthContextMiddleware.factory - -[filter:authtoken] -paste.filter_factory = keystonemiddleware.auth_token:filter_factory -delay_auth_decision = true - -[filter:keystonecontext] -paste.filter_factory = craton.api.middleware:KeystoneContextMiddleware.factory - -[filter:request_id] -paste.filter_factory = oslo_middleware:RequestId.factory diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index b8e35be..0000000 --- a/requirements.txt +++ /dev/null @@ -1,28 +0,0 @@ -# The order of packages is significant, because pip processes them in the order -# of appearance. 
Changing the order has an impact on the overall integration -# process, which may cause wedges in the gate later. - -decorator>=3.4.0 # BSD -Flask!=0.11,<1.0,>=0.10 # BSD -Flask-RESTful>=0.3.5 # BSD -jsonschema!=2.5.0,<3.0.0,>=2.0.0 # MIT -jsonpath-rw>=1.2.0,<2.0 # Apache-2.0 -kazoo>=2.2 # Apache-2.0 -keystonemiddleware>=4.12.0 # Apache-2.0 -oslo.db>=4.15.0 # Apache-2.0 -oslo.i18n>=2.1.0 # Apache-2.0 -oslo.middleware>=3.10.0 # Apache-2.0 -oslo.context>=2.12.0 # Apache-2.0 -oslo.config!=3.18.0,>=3.14.0 # Apache-2.0 -oslo.log>=3.11.0 # Apache-2.0 -oslo.serialization>=1.10.0 # Apache-2.0 -oslo.utils>=3.20.0 # Apache-2.0 -PasteDeploy>=1.5.0 # MIT -Paste # MIT -pbr>=2.0.0 # Apache-2.0 -SQLAlchemy>=1.1.0 -SQLAlchemy-Utils # BSD License -PyMySQL>=0.7.6 # MIT License -stevedore>=1.20.0 # Apache-2.0 -taskflow>=2.7.0 # Apache-2.0 -zake>=0.1.6 # Apache-2.0 diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 5460b41..0000000 --- a/setup.cfg +++ /dev/null @@ -1,53 +0,0 @@ -[metadata] -name = craton -summary = Platform for fleet management for OpenStack. 
-description-file = - README.rst -author = OpenStack -author-email = openstack-dev@lists.openstack.org -home-page = http://craton.readthedocs.org -classifier = - Environment :: OpenStack - Intended Audience :: Information Technology - Intended Audience :: System Administrators - License :: OSI Approved :: Apache Software License - Operating System :: POSIX :: Linux - Programming Language :: Python - Programming Language :: Python :: 3 - Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.5 - -[files] -packages = - craton - -[entry_points] -console_scripts = - craton-api = craton.cmd.api:main - craton-worker = craton.cmd.worker:main - craton-dbsync = craton.cmd.dbsync:main - -craton.workflow = - testflow = craton.workflow.testflow:TestFlow - -[build_sphinx] -source-dir = doc/source -build-dir = doc/build -all_files = 1 - -[upload_sphinx] -upload-dir = doc/build/html - -[compile_catalog] -directory = craton/locale -domain = craton - -[update_catalog] -domain = craton -output_dir = craton/locale -input_file = craton/locale/craton.pot - -[extract_messages] -keywords = _ gettext ngettext l_ lazy_gettext -mapping_file = babel.cfg -output_file = craton/locale/craton.pot diff --git a/setup.py b/setup.py deleted file mode 100644 index 566d844..0000000 --- a/setup.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT -import setuptools - -# In python < 2.7.4, a lazy loading of package `pbr` will break -# setuptools if some other modules registered functions in `atexit`. -# solution from: http://bugs.python.org/issue15881#msg170215 -try: - import multiprocessing # noqa -except ImportError: - pass - -setuptools.setup( - setup_requires=['pbr>=2.0.0'], - pbr=True) diff --git a/test-requirements.txt b/test-requirements.txt deleted file mode 100644 index b0403fd..0000000 --- a/test-requirements.txt +++ /dev/null @@ -1,25 +0,0 @@ -# The order of packages is significant, because pip processes them in the order -# of appearance. Changing the order has an impact on the overall integration -# process, which may cause wedges in the gate later. - -# Needed for testing -flake8<2.6.0,>=2.5.4 # MIT -Flask!=0.11,<1.0,>=0.10 # BSD -Flask-RESTful>=0.3.5 # BSD -fixtures>=3.0.0 # Apache-2.0/BSD -jsonschema!=2.5.0,<3.0.0,>=2.0.0 # MIT -nose # LGPL -nose-exclude # LGPL -mock>=2.0 # BSD -oslotest>=1.10.0 # Apache-2.0 -sphinx>=1.5.1 # BSD -testtools>=1.4.0 # MIT - -# Documentation -oslosphinx>=4.7.0 # Apache-2.0 - -# Functional testing -docker-py>=1.8.1 # Apache-2.0 -requests!=2.12.2,!=2.13.0,>=2.10.0 # Apache-2.0 -retrying!=1.3.0,>=1.2.3 # Apache-2.0 -PyMySQL>=0.7.6 # MIT License diff --git a/tools/craton-inventory.py b/tools/craton-inventory.py deleted file mode 100644 index 4857327..0000000 --- a/tools/craton-inventory.py +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env python - -""" -Craton Inventory Helper: A sample client script to use with Craton api -to drive Ansible playbooks. - -Description: - Generate Inventory that Ansible can understand by querying Craton Inventory - API. Craton will provide a fully formatted inventory json back that can - simply be used to drive ansible playbooks. -Configuration: - CRATON_INVENTORY_URL : should be the url pointing to your installation of - Craton Inventory service. 
- X-Auth-Project: Craton API X-Auth-Project - X-Auth-User: Craton API X-Auth-User - X-Auth-Token: Craton API X-Auth-Token - REGION: Environment variable to indicate which Region we are generating - the inventory for. - -Usage: - REGION=1 ansible-playbook -i cratoninventory.py playbook.yaml --list-hosts -""" - -import argparse -import json -import os -import requests -import sys - - -CRATON_INVENTORY_URL = "http://:/v1/ansible-inventory?region=%s" - - -def parse_args(): - parser = argparse.ArgumentParser(description='Inventory for Ansible') - group = parser.add_mutually_exclusive_group(required=True) - group.add_argument('--list', action='store_true', - help='List active servers') - group.add_argument('--host', help='List details about the specific host') - - return parser.parse_args() - - -def main(args): - - try: - env_name = os.environ['REGION'] - except KeyError as e: - sys.stderr.write('Unable to load %s\n' % e.message) - sys.exit(1) - - headers = { - "Content-Type": "application/json", - "X-Auth-Token": "", - "X-Auth-User": "", - "X-Auth-Project": "" - } - - try: - url = CRATON_INVENTORY_URL % env_name - resp = requests.get(url, headers=headers, verify=False) - if resp.status_code != 200: - print("Got non 200 response from Craton Inventory API") - sys.exit(1) - except Exception: - print("Error generating inventory from Craton Inventory API") - sys.exit(1) - - return resp.json() - - -if __name__ == '__main__': - args = parse_args() - output = main(args) - print(json.dumps(output)) - sys.exit(0) diff --git a/tools/cratonclient_with_keystone.py b/tools/cratonclient_with_keystone.py deleted file mode 100644 index 74895f9..0000000 --- a/tools/cratonclient_with_keystone.py +++ /dev/null @@ -1,72 +0,0 @@ -"""An example usage of cratonclient when working with Craton and Keystone. 
- -Requires: - - - Craton to be configured using Keystone for identity (configuring - etc/craton-api-conf.sample [keystone_authtoken] section and - etc/craton-api-paste.ini to use keystoneauthcontext) - - etc/craton-api-conf.sample should include something that looks like: - - .. code-block:: ini - - # etc/craton-api-conf.sample - [keystone_authtoken] - auth_host = 127.0.0.1 - auth_port = 5000 - auth_version = 3 - auth_protocol = http - project_name = service - username = craton - password = craton - project_domain_id = default - user_domain_id = default - auth_type = password - - While etc/craton-api.paste.ini should include something that looks like: - - .. code-block:: ini - - [pipeline:main] - pipeline = request_id keystonecontext api_v1 - - - Installing python-cratonclient - -""" - -from keystoneauth1.identity.v3 import password as password_auth -from keystoneauth1 import session as ksa_session - -from cratonclient import session -from cratonclient.v1 import client - -KEYSTONE_DOMAIN = '127.0.0.1' -KEYSTONE_PORT = '5000' -USERNAME = 'admin' -PASSWORD = 'secretepassword' -PROJECT_NAME = 'admin' -PROJECT_DOMAIN_NAME = 'Default' -USER_DOMAIN_NAME = 'Default' -AUTH_URL = 'http://{domain}:{port}/v3'.format(domain=KEYSTONE_DOMAIN, - port=KEYSTONE_PORT) - - -admin_auth = password_auth.Password( - auth_url=AUTH_URL, - password=PASSWORD, - username=USERNAME, - user_domain_name=USER_DOMAIN_NAME, - project_name=PROJECT_NAME, - project_domain_name=PROJECT_DOMAIN_NAME, -) -craton_session = session.Session( - session=ksa_session.Session(auth=admin_auth, - verify=False), -) -craton = client.Client( - session=craton_session, - url='http://127.0.0.1:7780/', -) - -inventory = craton.inventory(1) -hosts = inventory.hosts.list() diff --git a/tools/docker_run.sh b/tools/docker_run.sh deleted file mode 100755 index 3aa3763..0000000 --- a/tools/docker_run.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash - -/usr/bin/mysqld_safe > /dev/null 2>&1 & - -RET=1 -while [[ RET -ne 0 ]]; do - 
echo "=> Waiting for confirmation of MySQL service startup" - sleep 5 - mysql -uroot -e "status" > /dev/null 2>&1 - RET=$? -done - -mysql -uroot -e "SET GLOBAL log_output = 'TABLE';SET GLOBAL general_log = 'ON';" -mysql -uroot -e "CREATE DATABASE craton CHARACTER SET = 'utf8'" -mysql -uroot -e "GRANT ALL PRIVILEGES ON craton.* TO 'craton'@'%' IDENTIFIED BY 'craton'" -mysqladmin flush-privileges - -############### -# Run db-sync # -############## -/craton/bin/craton-dbsync --config-file=/craton/etc/craton-api-conf.sample upgrade - -#################################### -# Create initial project and users # -#################################### -# NOTE(sulo): One initial bootstrap project with root user will be created by the -# bootstrap process. Users can docker logs -f to view their api-key -# to use with the client. -/craton/bin/craton-dbsync --config-file=etc/craton-api-conf.sample bootstrap - -######################### -# Start the API service # -######################### -/craton/bin/craton-api --config-file=/craton/etc/craton-api-conf.sample diff --git a/tools/generate_fake_data.py b/tools/generate_fake_data.py deleted file mode 100644 index a4b1748..0000000 --- a/tools/generate_fake_data.py +++ /dev/null @@ -1,317 +0,0 @@ -import argparse -from ipaddress import ip_address -import json -import requests -import sys - -CLOUDS = [{"CLOUD1": { - "openstack_release": "juno"}}, - {"CLOUD2": { - "openstack_release": "kilo"}}] - -REGIONS = [{'ORD135': { - "glance_default_store": "swift", - "neutron_l2_population": True, - "tempest_public_subnet_cidr": "192.168.1.0/22", - "nova_console_type": "novnc"}}, - {'DFW': { - "glance_default_store": "swift", - "neutron_l2_population": True, - "tempest_public_subnet_cidr": "192.168.4.0/22", - "nova_console_type": "novnc"}}] - -CELLS = [{'C0001': {"cell_capabilities": "flavor_classes=performance2", - "console_host": "10.10.1.100"}}, - {'C0002': {"cell_capabilities": "flavor_classes=performance1", - "console_host": 
"10.20.1.100"}}] - - -def make_hosts(region, cell): - # no of hosts need to match ip_address available - no_of_hosts = 2 - cab1 = region + "." + cell + "." + "C-1" - cab2 = region + "." + cell + "." + "C-2" - - hosts = [] - for host in range(no_of_hosts): - hostname = "host%s.%s.example1.com" % (host, cab1) - hosts.append(hostname) - - for host in range(no_of_hosts): - hostname = "host%s.%s.example2.com" % (host, cab2) - hosts.append(hostname) - - return hosts - - -class Inventory(object): - def __init__(self, url, project_id, auth_user, auth_key): - self.url = url - self.auth_user = auth_user - self.auth_key = auth_key - self.project_id = project_id - self.region = None - self.cell = None - self.ip_addresses = self.generate_ip_addresses(32, u'192.168.1.5') - self.container_ips = self.generate_ip_addresses(128, u'172.0.0.2') - - self.headers = {"Content-Type": "application/json", - "X-Auth-Project": self.project_id, - "X-Auth-User": self.auth_user, - "X-Auth-Token": self.auth_key} - - def generate_ip_addresses(self, num_ips, starting_ip): - start_ip_address = ip_address(starting_ip) - ips = [str(start_ip_address + i) for i in range(num_ips)] - return ips - - def create_cloud(self, cloud, data=None): - cloud_url = self.url + "/clouds" - payload = {"name": cloud} - - print("Creating cloud entry for %s with data %s" % (payload, data)) - resp = requests.post(cloud_url, headers=self.headers, - data=json.dumps(payload), verify=False) - if resp.status_code != 201: - raise Exception(resp.text) - - self.cloud = resp.json() - if data: - reg_id = self.cloud["id"] - cloud_data_url = self.url + "/clouds/%s/variables" % reg_id - resp = requests.put(cloud_data_url, headers=self.headers, - data=json.dumps(data), verify=False) - if resp.status_code != 200: - print(resp.text) - - def create_region(self, region, data=None): - region_url = self.url + "/regions" - payload = {"name": region, "cloud_id": self.cloud.get("id")} - - print("Creating region entry for %s with data %s" % 
(payload, data)) - resp = requests.post(region_url, headers=self.headers, - data=json.dumps(payload), verify=False) - if resp.status_code != 201: - raise Exception(resp.text) - - self.region = resp.json() - if data: - reg_id = self.region["id"] - region_data_url = self.url + "/regions/%s/variables" % reg_id - resp = requests.put(region_data_url, headers=self.headers, - data=json.dumps(data), verify=False) - if resp.status_code != 200: - print(resp.text) - - def create_cell(self, cell, data=None): - region_url = self.url + "/cells" - payload = {"region_id": self.region.get("id"), - "cloud_id": self.cloud.get("id"), "name": cell} - - print("Creating cell entry %s with data %s" % (payload, data)) - resp = requests.post(region_url, headers=self.headers, - data=json.dumps(payload), verify=False) - if resp.status_code != 201: - raise Exception(resp.text) - - self.cell = resp.json() - if data: - c_id = resp.json()["id"] - region_data_url = self.url + "/cells/%s/variables" % c_id - resp = requests.put(region_data_url, headers=self.headers, - data=json.dumps(data), verify=False) - if resp.status_code != 200: - print(resp.text) - - def create_container(self, host_obj, data=None): - region_url = self.url + "/hosts" - - payload = {"region_id": host_obj.get("region_id"), - "cloud_id": host_obj.get("cloud_id"), - "cell_id": host_obj.get("cell_id"), - "ip_address": self.container_ips.pop(0), - "device_type": "container"} - - payload["parent_id"] = host_obj["id"] - name = "container_{}".format(host_obj["name"]) - payload["name"] = name - - print("Creating container entry %s with data %s" % (payload, data)) - container_obj = requests.post(region_url, headers=self.headers, - data=json.dumps(payload), verify=False) - if container_obj.status_code != 201: - raise Exception(container_obj.text) - - label_url = self.url + "/hosts/%s/labels" % container_obj.json()['id'] - labels = {"labels": ["container"]} - resp = requests.put(label_url, headers=self.headers, - data=json.dumps(labels), 
verify=False) - - if resp.status_code != 200: - print("Failed to create label for host %s" % name) - print(resp.text) - - def create_device(self, host, device_type, parent=None, data=None): - region_url = self.url + "/hosts" - payload = {"region_id": self.region.get("id"), - "cloud_id": self.cloud.get("id"), - "cell_id": self.cell.get("id"), - "name": host, - "ip_address": self.ip_addresses.pop(0), - "device_type": device_type} - - if parent is not None: - payload["parent_id"] = parent - - print("Creating host entry %s with data %s" % (payload, data)) - device_obj = requests.post(region_url, headers=self.headers, - data=json.dumps(payload), verify=False) - - if device_obj.status_code != 201: - raise Exception(device_obj.text) - - if data: - device_id = device_obj.json()["id"] - region_data_url = self.url + "/hosts/%s/variables" % device_id - resp = requests.put(region_data_url, headers=self.headers, - data=json.dumps(data), verify=False) - if resp.status_code != 200: - print(resp.text) - - label_url = self.url + "/hosts/%s/labels" % device_obj.json()["id"] - labels = {"labels": [device_type]} - resp = requests.put(label_url, headers=self.headers, - data=json.dumps(labels), verify=False) - - if resp.status_code != 200: - print("Failed to create label for host %s" % host) - print(resp.text) - - return device_obj.json() - - def create_network(self, name, cidr, gateway, netmask, block_type): - networks_url = self.url + "/networks" - payload = {"name": name, - "cidr": cidr, - "gateway": gateway, - "netmask": netmask, - "ip_block_type": block_type, - "cloud_id": self.cloud.get("id"), - "region_id": self.region.get("id"), - "cell_id": self.cell.get("id")} - - print("Creating new network: %s" % payload) - resp = requests.post(networks_url, headers=self.headers, - data=json.dumps(payload), verify=False) - if resp.status_code != 201: - raise Exception(resp.text) - - return resp.json() - - def create_netdevice(self, name, device_type): - network_devices_url = self.url + 
"/network-devices" - payload = {"name": name, - "model_name": "model-x", - "os_version": "version-1", - "device_type": device_type, - "ip_address": "10.10.1.1", - "active": True, - "cloud_id": self.cloud.get("id"), - "region_id": self.region.get("id"), - "cell_id": self.cell.get("id")} - - resp = requests.post(network_devices_url, headers=self.headers, - data=json.dumps(payload), verify=False) - if resp.status_code != 201: - raise Exception(resp.text) - - return resp.json() - - def create_net_interface(self, device, int_num, network=None): - netinterfaces_url = self.url + "/network-interfaces" - name = "eth%s" % int_num - payload = {"name": name, - "interface_type": "ethernet", - "vlan_id": 1, - "port": int_num, - "duplex": "full", - "speed": 1000, - "link": "up", - "device_id": device.get("id"), - "ip_address": "10.10.0.1"} - if network: - payload["network_id"] = network.get("id") - - print("Creating network interface %s on device %s for network %s" - % (name, device.get("id"), network.get("id"))) - resp = requests.post(netinterfaces_url, headers=self.headers, - data=json.dumps(payload), verify=False) - if resp.status_code != 201: - raise Exception(resp.text) - - return resp.json() - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument('--url', help='Endpoint for Craton Service') - parser.add_argument('--user', help='User id') - parser.add_argument('--project', help='Project id') - parser.add_argument('--key', help='Auth Key for the sevice') - args = parser.parse_args() - - if not args.url: - print("--url is required. This is your craton api endpoint") - sys.exit(1) - if not args.user: - print("--user is required. This is your craton user id.") - sys.exit(1) - if not args.project: - print("--project is required. This is your craton project id.") - sys.exit(1) - if not args.key: - print("--key is required. 
This is your craton auth key.") - sys.exit(1) - - Inv = Inventory(args.url, args.project, args.user, args.key) - - for cloud in CLOUDS: - # First create cloud - cloud_name = list(cloud.keys())[0] - Inv.create_cloud(cloud_name, data=cloud[cloud_name]) - - for region in REGIONS: - # Frist create region - region_name = list(region.keys())[0] - Inv.create_region(region_name, data=region[region_name]) - - for cell in CELLS: - cell_name = list(cell.keys())[0] - Inv.create_cell(cell_name, data=cell[cell_name]) - # Create a example private network for the cell - network_name = "private_net_%s" % cell_name - network = Inv.create_network(network_name, - "192.168.1.0", - "192.168.1.1", - "255.255.255.0", - "private") - # Create a ToR switch for this cell - _name = "switch1.%s.%s.example.com" % (cell_name, region_name) - switch = Inv.create_netdevice(_name, "switch") - # NOTE(sulo): Create 6 switch ports on the switch with the - # above network, the same switch can have other networks - # as well. - for int_num in range(5): - Inv.create_net_interface(switch, int_num, network=network) - # Get host in the cell - hosts = make_hosts(region_name, cell_name) - for host in hosts: - host_obj = Inv.create_device(host, 'server', - parent=switch['id']) - # Create container on each host - Inv.create_container(host_obj) - # Create network interface on the host to connect to the - # private network, the interfaces allows us to conncet this - # host to the switch or other devices, such that we can - # form logical or physical groupings such as a cab. 
- Inv.create_net_interface(host_obj, 0, network=network) diff --git a/tools/test-setup.sh b/tools/test-setup.sh deleted file mode 100755 index 89e8b0b..0000000 --- a/tools/test-setup.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/sh - -# This script will execute before OpenStack infrastructure revokes sudo -# We need it to allow the Jenkins user to interact with docker - -# This tells docker to use the current user's primary group to run the unix -# domain socket for docker. This side-steps the need for the current user to -# be added to the docker group and then have to log out and back in. -sudo dd of=/lib/systemd/system/docker.socket << _EOF_ -[Unit] -Description=Docker Socket for the API -PartOf=docker.service - -[Socket] -ListenStream=/var/run/docker.sock -SocketMode=0660 -SocketUser=root -SocketGroup=$(id -gn) - -[Install] -WantedBy=sockets.target -_EOF_ - -echo "=> Restarting docker" -sudo systemctl daemon-reload -sudo systemctl restart docker - -echo "=> Checking permissions on the socket" -stat /var/run/docker.sock - -echo "=> Discovering docker version installed" -docker version diff --git a/tools/wrapper-functions.sh b/tools/wrapper-functions.sh deleted file mode 100644 index 2af3ff6..0000000 --- a/tools/wrapper-functions.sh +++ /dev/null @@ -1,141 +0,0 @@ - -# Example usage. 
Note that we simply pass through httpie conventions, -# such as nested JSON with := -# (https://github.com/jkbrzt/httpie#non-string-json-fields) -# -# Run from Craton parent directory -# $ source tools/wrapper-functions.sh -# -# For Docker quick start: -# $ craton-docker-start -# $ eval $(craton-docker-env) -# -# Direct env quick start: -# $ craton-direct-start -# $ eval $(craton-direct-env) -# -# Populate with fake data: -# $ craton-fake-data -# -# API calls: -# $ craton-post v1/regions name=HKG -# $ craton-get v1/hosts -# $ craton-put v1/hosts/3 device_type=container -# $ craton-put v1/hosts/3/variables foo=47 bar:='["a", "b", "c"]' -# $ craton-delete v1/hosts/4 - -# NOTE assumes the installation of httpie so that the http command is available! - -fix-url() { - [[ "$1" =~ ^http ]] && echo $1 || echo "${CRATON_URL}/${1}" -} - -craton-get() { - http "$(fix-url $1)" \ - "Content-Type:application/json" \ - "X-Auth-Token:${OS_PASSWORD}" \ - "X-Auth-User:${OS_USERNAME}" \ - "X-Auth-Project:${OS_PROJECT_ID}" -} - -craton-post() { - http POST "$(fix-url $1)" \ - "Content-Type:application/json" \ - "X-Auth-Token:${OS_PASSWORD}" \ - "X-Auth-User:${OS_USERNAME}" \ - "X-Auth-Project:${OS_PROJECT_ID}" \ - "${@:2}" -} - -craton-put() { - http PUT "$(fix-url $1)" \ - "Content-Type:application/json" \ - "X-Auth-Token:${OS_PASSWORD}" \ - "X-Auth-User:${OS_USERNAME}" \ - "X-Auth-Project:${OS_PROJECT_ID}" \ - "${@:2}" -} - -craton-delete() { - http DELETE "$(fix-url $1)" \ - "Content-Type:application/json" \ - "X-Auth-Token:${OS_PASSWORD}" \ - "X-Auth-User:${OS_USERNAME}" \ - "X-Auth-Project:${OS_PROJECT_ID}" \ - "${@:2}" -} - -_craton-extract-env() { - echo OS_PROJECT_ID=$(echo "$1" | grep 'ProjectId' | awk '{print $2}' | tr -d '\r') - echo OS_USERNAME=$(echo "$1" | grep 'Username' | awk '{print $2}' | tr -d '\r') - echo OS_PASSWORD=$(echo "$1" | grep 'APIKey' | awk '{print $2}' | tr -d '\r') -} - -craton-docker-env() { - _craton-extract-env "$(docker logs craton-api)" - 
CRATON_PORT=$(sed -nr "/^\[api\]/ { :l /^port[ ]*=/ { s/.*=[ ]*//; p; q;}; n; b l;}" etc/craton-api-conf.sample) - echo CRATON_URL=http://$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' craton-api):${CRATON_PORT} -} - -craton-direct-env() { - _craton-extract-env "$(craton-dbsync --config-file=etc/craton-api-conf.dev bootstrap)" - CRATON_PORT=$(sed -nr "/^\[api\]/ { :l /^port[ ]*=/ { s/.*=[ ]*//; p; q;}; n; b l;}" etc/craton-api-conf.dev) - echo CRATON_URL=http://$(sed -nr "/^\[api\]/ { :l /^host[ ]*=/ { s/.*=[ ]*//; p; q;}; n; b l;}" etc/craton-api-conf.dev):${CRATON_PORT} -} - -craton-direct-start() { - # NOTE(jimbaker) Assumes MySQL user 'craton' is setup with password 'craton'!!! - - cat </dev/null - do - echo "Waiting for API server"; sleep 1 - done - - python3 tools/generate_fake_data.py \ - --url $CRATON_URL/v1 \ - --user $OS_USERNAME --project $OS_PROJECT_ID --key $OS_PASSWORD - - craton-get v1/regions -} - -craton-docker-start() { - echo "Starting Craton docker container..." - docker rm -f craton-api 2>/dev/null || true - docker build --pull -t craton-api:latest . - docker run -t --name craton-api -p 127.0.0.1:7780:7780 -d craton-api:latest - - CRATON_PORT=$(sed -nr "/^\[api\]/ { :l /^port[ ]*=/ { s/.*=[ ]*//; p; q;}; n; b l;}" etc/craton-api-conf.sample) - CRATON_URL=http://$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' craton-api):${CRATON_PORT} - OS_PROJECT_ID=probe - OS_USERNAME=probe - OS_PASSWORD=probe - until craton-get v1/regions 2>/dev/null - do - echo "Waiting for API server"; sleep 1 - done - - eval $(craton-docker-env) - craton-fake-data - - docker logs -f craton-api -} diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 4ce9b5b..0000000 --- a/tox.ini +++ /dev/null @@ -1,48 +0,0 @@ -[tox] -minversion = 2.0 -envlist = py35,pep8 -skipsdist = True - -[testenv] -basepython = python3.5 -usedevelop = True -setenv = - VIRTUAL_ENV={envdir} -deps = -r{toxinidir}/test-requirements.txt -commands = - find . 
-type f -name "*.pyc" -delete -whitelist_externals = find - -[testenv:py35] -commands = - {[testenv]commands} - # NOTE(thomasem): Exclude DB unit tests, since SQLite no longer works. Will forklift to functional with real DB. - nosetests -v --where={toxinidir}/craton/tests/unit --exclude-dir {toxinidir}/craton/tests/unit/db - -[testenv:functional] -commands = - {[testenv]commands} - nosetests -v --exclude-dir {toxinidir}/craton/tests/unit/db [] - -[testenv:pep8] -commands = flake8 {posargs} - -[testenv:venv] -commands = {posargs} - -[testenv:cover] -commands = python setup.py test --coverage --testr-args='{posargs}' - -[testenv:docs] -commands = python setup.py build_sphinx - -[testenv:debug] -commands = oslo_debug_helper {posargs} - -[flake8] -# E123, E125 skipped as they are invalid PEP-8. - -show-source = True -ignore = E123,E125,E402 -builtins = _ -exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build